Posted to commits@hive.apache.org by jc...@apache.org on 2016/12/15 18:36:08 UTC

[3/4] hive git commit: HIVE-15409: Add support for GROUPING function with grouping sets (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)
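
HIVE-15409 switches the synthetic grouping-set key in these plans from the string constant '0' (type: string) to the int constant 0 (type: int) and adds the GROUPING function on top of it. As a minimal sketch of what the new golden file groupby_grouping_sets_grouping.q.out below exercises (table, data file and query are copied from that file; the comment on grouping() is inferred from its expected output, not a general statement of the semantics):

    CREATE TABLE T1(key INT, value INT) STORED AS TEXTFILE;
    LOAD DATA LOCAL INPATH '../../data/files/groupby_groupingid.txt' INTO TABLE T1;

    -- In the expected output of this test, grouping(col) is 1 when col is part
    -- of the grouping set for that row and 0 when it has been rolled up.
    SELECT key, value, `grouping__id`, grouping(key), grouping(value)
    FROM T1
    GROUP BY ROLLUP(key, value);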

http://git-wip-us.apache.org/repos/asf/hive/blob/89362a14/ql/src/test/results/clientpositive/groupby_cube1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_cube1.q.out b/ql/src/test/results/clientpositive/groupby_cube1.q.out
index 0258bb8..9eea534 100644
--- a/ql/src/test/results/clientpositive/groupby_cube1.q.out
+++ b/ql/src/test/results/clientpositive/groupby_cube1.q.out
@@ -41,20 +41,20 @@ STAGE PLANS:
               Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: count(1)
-                keys: _col0 (type: string), _col1 (type: string), '0' (type: string)
+                keys: _col0 (type: string), _col1 (type: string), 0 (type: int)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2, _col3
                 Statistics: Num rows: 4 Data size: 120 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   sort order: +++
-                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   Statistics: Num rows: 4 Data size: 120 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col3 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col3
           Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE
@@ -100,20 +100,20 @@ STAGE PLANS:
               Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: count(1)
-                keys: _col0 (type: string), _col1 (type: string), '0' (type: string)
+                keys: _col0 (type: string), _col1 (type: string), 0 (type: int)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2, _col3
                 Statistics: Num rows: 4 Data size: 120 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   sort order: +++
-                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   Statistics: Num rows: 4 Data size: 120 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col3 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col3
           Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE
@@ -185,25 +185,25 @@ STAGE PLANS:
               Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: count(1)
-                keys: _col0 (type: string), _col1 (type: string), '0' (type: string)
+                keys: _col0 (type: string), _col1 (type: string), 0 (type: int)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2, _col3
                 Statistics: Num rows: 4 Data size: 120 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   sort order: +++
-                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   Statistics: Num rows: 4 Data size: 120 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col3 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3
           Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: bigint)
+            expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int), _col3 (type: bigint)
             outputColumnNames: _col0, _col1, _col2, _col3
             Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -269,19 +269,19 @@ STAGE PLANS:
               Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: count(DISTINCT val)
-                keys: key (type: string), '0' (type: string), val (type: string)
+                keys: key (type: string), 0 (type: int), val (type: string)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2, _col3
                 Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  key expressions: _col0 (type: string), _col1 (type: int), _col2 (type: string)
                   sort order: +++
-                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
+                  Map-reduce partition columns: _col0 (type: string), _col1 (type: int)
                   Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(DISTINCT KEY._col2:0._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col2
           Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
@@ -342,12 +342,12 @@ STAGE PLANS:
               Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: count(1)
-                keys: _col0 (type: string), _col1 (type: string), '0' (type: string)
+                keys: _col0 (type: string), _col1 (type: string), 0 (type: int)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2, _col3
                 Statistics: Num rows: 4 Data size: 120 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   sort order: +++
                   Map-reduce partition columns: rand() (type: double)
                   Statistics: Num rows: 4 Data size: 120 Basic stats: COMPLETE Column stats: NONE
@@ -355,7 +355,7 @@ STAGE PLANS:
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: partials
           outputColumnNames: _col0, _col1, _col2, _col3
           Statistics: Num rows: 4 Data size: 120 Basic stats: COMPLETE Column stats: NONE
@@ -371,7 +371,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               sort order: +++
               Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
               Statistics: Num rows: 4 Data size: 120 Basic stats: COMPLETE Column stats: NONE
@@ -379,7 +379,7 @@ STAGE PLANS:
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: final
           outputColumnNames: _col0, _col1, _col3
           Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE
@@ -452,19 +452,19 @@ STAGE PLANS:
               Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: count(DISTINCT val)
-                keys: key (type: string), '0' (type: string), val (type: string)
+                keys: key (type: string), 0 (type: int), val (type: string)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2, _col3
                 Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  key expressions: _col0 (type: string), _col1 (type: int), _col2 (type: string)
                   sort order: +++
                   Map-reduce partition columns: _col0 (type: string)
                   Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(DISTINCT KEY._col2:0._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: int)
           mode: partials
           outputColumnNames: _col0, _col1, _col2
           Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE
@@ -480,7 +480,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: string)
+              key expressions: _col0 (type: string), _col1 (type: int)
               sort order: ++
               Map-reduce partition columns: _col0 (type: string)
               Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE
@@ -488,7 +488,7 @@ STAGE PLANS:
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: int)
           mode: final
           outputColumnNames: _col0, _col2
           Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
@@ -574,12 +574,12 @@ STAGE PLANS:
               Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: count(1)
-                keys: key (type: string), val (type: string), '0' (type: string)
+                keys: key (type: string), val (type: string), 0 (type: int)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2, _col3
                 Statistics: Num rows: 4 Data size: 120 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   sort order: +++
                   Map-reduce partition columns: rand() (type: double)
                   Statistics: Num rows: 4 Data size: 120 Basic stats: COMPLETE Column stats: NONE
@@ -590,7 +590,7 @@ STAGE PLANS:
               Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: sum(1)
-                keys: key (type: string), val (type: string), '0' (type: string)
+                keys: key (type: string), val (type: string), 0 (type: int)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2, _col3
                 Statistics: Num rows: 4 Data size: 120 Basic stats: COMPLETE Column stats: NONE
@@ -603,7 +603,7 @@ STAGE PLANS:
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: partials
           outputColumnNames: _col0, _col1, _col2, _col3
           Statistics: Num rows: 4 Data size: 120 Basic stats: COMPLETE Column stats: NONE
@@ -619,7 +619,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               sort order: +++
               Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
               Statistics: Num rows: 4 Data size: 120 Basic stats: COMPLETE Column stats: NONE
@@ -627,7 +627,7 @@ STAGE PLANS:
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: final
           outputColumnNames: _col0, _col1, _col3
           Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE
@@ -663,7 +663,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               sort order: +++
               Map-reduce partition columns: rand() (type: double)
               Statistics: Num rows: 4 Data size: 120 Basic stats: COMPLETE Column stats: NONE
@@ -671,7 +671,7 @@ STAGE PLANS:
       Reduce Operator Tree:
         Group By Operator
           aggregations: sum(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: partials
           outputColumnNames: _col0, _col1, _col2, _col3
           Statistics: Num rows: 4 Data size: 120 Basic stats: COMPLETE Column stats: NONE
@@ -687,7 +687,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               sort order: +++
               Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
               Statistics: Num rows: 4 Data size: 120 Basic stats: COMPLETE Column stats: NONE
@@ -695,7 +695,7 @@ STAGE PLANS:
       Reduce Operator Tree:
         Group By Operator
           aggregations: sum(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: final
           outputColumnNames: _col0, _col1, _col3
           Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE

http://git-wip-us.apache.org/repos/asf/hive/blob/89362a14/ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out b/ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out
index 6eaef7e..f6e1b17 100644
--- a/ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out
+++ b/ql/src/test/results/clientpositive/groupby_cube_multi_gby.q.out
@@ -50,21 +50,21 @@ STAGE PLANS:
               outputColumnNames: key, value
               Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
-                keys: key (type: string), value (type: string), '0' (type: string)
+                keys: key (type: string), value (type: string), 0 (type: int)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   sort order: +++
-                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: key (type: string), value (type: string)
               outputColumnNames: key, value
               Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
-                keys: key (type: string), value (type: string), '0' (type: string)
+                keys: key (type: string), value (type: string), 0 (type: int)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
@@ -76,12 +76,12 @@ STAGE PLANS:
                       serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
       Reduce Operator Tree:
         Group By Operator
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2
           Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: _col0 (type: string), _col2 (type: string)
+            expressions: _col0 (type: string), _col2 (type: int)
             outputColumnNames: _col0, _col1
             Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -111,13 +111,13 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               sort order: +++
-              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
       Reduce Operator Tree:
         Group By Operator
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE

http://git-wip-us.apache.org/repos/asf/hive/blob/89362a14/ql/src/test/results/clientpositive/groupby_grouping_sets2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_grouping_sets2.q.out b/ql/src/test/results/clientpositive/groupby_grouping_sets2.q.out
index 09d52c0..39a9e6c 100644
--- a/ql/src/test/results/clientpositive/groupby_grouping_sets2.q.out
+++ b/ql/src/test/results/clientpositive/groupby_grouping_sets2.q.out
@@ -53,7 +53,7 @@ STAGE PLANS:
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), '0' (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), 0 (type: int)
           mode: partials
           outputColumnNames: _col0, _col1, _col2, _col3
           Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
@@ -69,15 +69,15 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               sort order: +++
-              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
               value expressions: _col3 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: final
           outputColumnNames: _col0, _col1, _col3
           Statistics: Num rows: 2 Data size: 72 Basic stats: COMPLETE Column stats: NONE
@@ -137,7 +137,7 @@ STAGE PLANS:
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), '0' (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), 0 (type: int)
           mode: partials
           outputColumnNames: _col0, _col1, _col2, _col3
           Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
@@ -153,15 +153,15 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               sort order: +++
-              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
               value expressions: _col3 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: final
           outputColumnNames: _col0, _col1, _col3
           Statistics: Num rows: 2 Data size: 72 Basic stats: COMPLETE Column stats: NONE
@@ -244,7 +244,7 @@ STAGE PLANS:
       Reduce Operator Tree:
         Group By Operator
           aggregations: sum(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), '0' (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), 0 (type: int)
           mode: partials
           outputColumnNames: _col0, _col1, _col2, _col3
           Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
@@ -260,15 +260,15 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               sort order: +++
-              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
               value expressions: _col3 (type: double)
       Reduce Operator Tree:
         Group By Operator
           aggregations: sum(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: final
           outputColumnNames: _col0, _col1, _col3
           Statistics: Num rows: 2 Data size: 72 Basic stats: COMPLETE Column stats: NONE
@@ -373,7 +373,7 @@ STAGE PLANS:
       Reduce Operator Tree:
         Group By Operator
           aggregations: sum(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), '0' (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), 0 (type: int)
           mode: partials
           outputColumnNames: _col0, _col1, _col2, _col3
           Statistics: Num rows: 24 Data size: 168 Basic stats: COMPLETE Column stats: NONE
@@ -389,15 +389,15 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               sort order: +++
-              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               Statistics: Num rows: 24 Data size: 168 Basic stats: COMPLETE Column stats: NONE
               value expressions: _col3 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
           aggregations: sum(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: final
           outputColumnNames: _col0, _col1, _col3
           Statistics: Num rows: 12 Data size: 84 Basic stats: COMPLETE Column stats: NONE

http://git-wip-us.apache.org/repos/asf/hive/blob/89362a14/ql/src/test/results/clientpositive/groupby_grouping_sets3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_grouping_sets3.q.out b/ql/src/test/results/clientpositive/groupby_grouping_sets3.q.out
index 04ece02..8428631 100644
--- a/ql/src/test/results/clientpositive/groupby_grouping_sets3.q.out
+++ b/ql/src/test/results/clientpositive/groupby_grouping_sets3.q.out
@@ -59,20 +59,20 @@ STAGE PLANS:
               Statistics: Num rows: 1 Data size: 72 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: avg(c), count()
-                keys: a (type: string), b (type: string), '0' (type: string)
+                keys: a (type: string), b (type: string), 0 (type: int)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4
                 Statistics: Num rows: 4 Data size: 288 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   sort order: +++
-                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   Statistics: Num rows: 4 Data size: 288 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col3 (type: struct<count:bigint,sum:double,input:string>), _col4 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
           aggregations: avg(VALUE._col0), count(VALUE._col1)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col3, _col4
           Statistics: Num rows: 2 Data size: 144 Basic stats: COMPLETE Column stats: NONE
@@ -118,20 +118,20 @@ STAGE PLANS:
               Statistics: Num rows: 1 Data size: 72 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: avg(c), count()
-                keys: a (type: string), b (type: string), '0' (type: string)
+                keys: a (type: string), b (type: string), 0 (type: int)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4
                 Statistics: Num rows: 4 Data size: 288 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   sort order: +++
-                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   Statistics: Num rows: 4 Data size: 288 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col3 (type: struct<count:bigint,sum:double,input:string>), _col4 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
           aggregations: avg(VALUE._col0), count(VALUE._col1)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col3, _col4
           Statistics: Num rows: 2 Data size: 144 Basic stats: COMPLETE Column stats: NONE
@@ -219,7 +219,7 @@ STAGE PLANS:
       Reduce Operator Tree:
         Group By Operator
           aggregations: avg(VALUE._col0), count(VALUE._col1)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), '0' (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), 0 (type: int)
           mode: partials
           outputColumnNames: _col0, _col1, _col2, _col3, _col4
           Statistics: Num rows: 4 Data size: 288 Basic stats: COMPLETE Column stats: NONE
@@ -235,15 +235,15 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               sort order: +++
-              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               Statistics: Num rows: 4 Data size: 288 Basic stats: COMPLETE Column stats: NONE
               value expressions: _col3 (type: struct<count:bigint,sum:double,input:string>), _col4 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
           aggregations: avg(VALUE._col0), count(VALUE._col1)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: final
           outputColumnNames: _col0, _col1, _col3, _col4
           Statistics: Num rows: 2 Data size: 144 Basic stats: COMPLETE Column stats: NONE

http://git-wip-us.apache.org/repos/asf/hive/blob/89362a14/ql/src/test/results/clientpositive/groupby_grouping_sets4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_grouping_sets4.q.out b/ql/src/test/results/clientpositive/groupby_grouping_sets4.q.out
index 095113a..f688da3 100644
--- a/ql/src/test/results/clientpositive/groupby_grouping_sets4.q.out
+++ b/ql/src/test/results/clientpositive/groupby_grouping_sets4.q.out
@@ -56,20 +56,20 @@ STAGE PLANS:
               Statistics: Num rows: 1 Data size: 36 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: count()
-                keys: a (type: string), b (type: string), '0' (type: string)
+                keys: a (type: string), b (type: string), 0 (type: int)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2, _col3
                 Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   sort order: +++
-                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col3 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col3
           Statistics: Num rows: 2 Data size: 72 Basic stats: COMPLETE Column stats: NONE
@@ -133,20 +133,20 @@ STAGE PLANS:
               Statistics: Num rows: 1 Data size: 36 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: count()
-                keys: a (type: string), b (type: string), '0' (type: string)
+                keys: a (type: string), b (type: string), 0 (type: int)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2, _col3
                 Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   sort order: +++
-                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col3 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col3
           Statistics: Num rows: 2 Data size: 72 Basic stats: COMPLETE Column stats: NONE
@@ -203,20 +203,20 @@ STAGE PLANS:
               Statistics: Num rows: 1 Data size: 36 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: count()
-                keys: a (type: string), b (type: string), '0' (type: string)
+                keys: a (type: string), b (type: string), 0 (type: int)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2, _col3
                 Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   sort order: +++
-                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col3 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col3
           Statistics: Num rows: 2 Data size: 72 Basic stats: COMPLETE Column stats: NONE
@@ -280,20 +280,20 @@ STAGE PLANS:
               Statistics: Num rows: 1 Data size: 36 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: count()
-                keys: a (type: string), b (type: string), '0' (type: string)
+                keys: a (type: string), b (type: string), 0 (type: int)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2, _col3
                 Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   sort order: +++
-                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col3 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col3
           Statistics: Num rows: 2 Data size: 72 Basic stats: COMPLETE Column stats: NONE
@@ -398,7 +398,7 @@ STAGE PLANS:
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), '0' (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), 0 (type: int)
           mode: partials
           outputColumnNames: _col0, _col1, _col2, _col3
           Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
@@ -414,15 +414,15 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               sort order: +++
-              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
               value expressions: _col3 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: final
           outputColumnNames: _col0, _col1, _col3
           Statistics: Num rows: 2 Data size: 72 Basic stats: COMPLETE Column stats: NONE
@@ -499,7 +499,7 @@ STAGE PLANS:
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), '0' (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), 0 (type: int)
           mode: partials
           outputColumnNames: _col0, _col1, _col2, _col3
           Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
@@ -515,15 +515,15 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               sort order: +++
-              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
               value expressions: _col3 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: final
           outputColumnNames: _col0, _col1, _col3
           Statistics: Num rows: 2 Data size: 72 Basic stats: COMPLETE Column stats: NONE

http://git-wip-us.apache.org/repos/asf/hive/blob/89362a14/ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out b/ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out
index e91bd41..de019e3 100644
--- a/ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out
+++ b/ql/src/test/results/clientpositive/groupby_grouping_sets5.q.out
@@ -62,7 +62,7 @@ STAGE PLANS:
           Statistics: Num rows: 1 Data size: 36 Basic stats: COMPLETE Column stats: NONE
           Group By Operator
             aggregations: count()
-            keys: _col0 (type: string), _col1 (type: string), '0' (type: string)
+            keys: _col0 (type: string), _col1 (type: string), 0 (type: int)
             mode: hash
             outputColumnNames: _col0, _col1, _col2, _col3
             Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
@@ -78,15 +78,15 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               sort order: +++
-              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
               value expressions: _col3 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col3
           Statistics: Num rows: 2 Data size: 72 Basic stats: COMPLETE Column stats: NONE
@@ -151,7 +151,7 @@ STAGE PLANS:
           Statistics: Num rows: 1 Data size: 36 Basic stats: COMPLETE Column stats: NONE
           Group By Operator
             aggregations: count()
-            keys: _col0 (type: string), _col1 (type: string), '0' (type: string)
+            keys: _col0 (type: string), _col1 (type: string), 0 (type: int)
             mode: hash
             outputColumnNames: _col0, _col1, _col2, _col3
             Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
@@ -167,15 +167,15 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               sort order: +++
-              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
               value expressions: _col3 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col3
           Statistics: Num rows: 2 Data size: 72 Basic stats: COMPLETE Column stats: NONE
@@ -292,7 +292,7 @@ STAGE PLANS:
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), '0' (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), 0 (type: int)
           mode: partials
           outputColumnNames: _col0, _col1, _col2, _col3
           Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
@@ -308,15 +308,15 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               sort order: +++
-              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+              Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
               Statistics: Num rows: 4 Data size: 144 Basic stats: COMPLETE Column stats: NONE
               value expressions: _col3 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: final
           outputColumnNames: _col0, _col1, _col3
           Statistics: Num rows: 2 Data size: 72 Basic stats: COMPLETE Column stats: NONE

http://git-wip-us.apache.org/repos/asf/hive/blob/89362a14/ql/src/test/results/clientpositive/groupby_grouping_sets6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_grouping_sets6.q.out b/ql/src/test/results/clientpositive/groupby_grouping_sets6.q.out
index 9b90dec..8166240 100644
--- a/ql/src/test/results/clientpositive/groupby_grouping_sets6.q.out
+++ b/ql/src/test/results/clientpositive/groupby_grouping_sets6.q.out
@@ -41,18 +41,18 @@ STAGE PLANS:
               predicate: (UDFToDouble(a) = 5.0) (type: boolean)
               Statistics: Num rows: 1 Data size: 36 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
-                keys: a (type: string), b (type: string), '0' (type: string)
+                keys: a (type: string), b (type: string), 0 (type: int)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 2 Data size: 72 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   sort order: +++
-                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   Statistics: Num rows: 2 Data size: 72 Basic stats: COMPLETE Column stats: NONE
       Reduce Operator Tree:
         Group By Operator
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 1 Data size: 36 Basic stats: COMPLETE Column stats: NONE
@@ -112,18 +112,18 @@ STAGE PLANS:
               predicate: (UDFToDouble(a) = 5.0) (type: boolean)
               Statistics: Num rows: 1 Data size: 36 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
-                keys: a (type: string), b (type: string), '0' (type: string)
+                keys: a (type: string), b (type: string), 0 (type: int)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2
                 Statistics: Num rows: 2 Data size: 72 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   sort order: +++
-                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+                  Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   Statistics: Num rows: 2 Data size: 72 Basic stats: COMPLETE Column stats: NONE
       Reduce Operator Tree:
         Group By Operator
-          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
+          keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 1 Data size: 36 Basic stats: COMPLETE Column stats: NONE

http://git-wip-us.apache.org/repos/asf/hive/blob/89362a14/ql/src/test/results/clientpositive/groupby_grouping_sets_grouping.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_grouping_sets_grouping.q.out b/ql/src/test/results/clientpositive/groupby_grouping_sets_grouping.q.out
new file mode 100644
index 0000000..dcc80f7
--- /dev/null
+++ b/ql/src/test/results/clientpositive/groupby_grouping_sets_grouping.q.out
@@ -0,0 +1,765 @@
+PREHOOK: query: CREATE TABLE T1(key INT, value INT) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@T1
+POSTHOOK: query: CREATE TABLE T1(key INT, value INT) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_groupingid.txt' INTO TABLE T1
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@t1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/groupby_groupingid.txt' INTO TABLE T1
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@t1
+PREHOOK: query: explain
+select key, value, `grouping__id`, grouping(key), grouping(value)
+from T1
+group by rollup(key, value)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select key, value, `grouping__id`, grouping(key), grouping(value)
+from T1
+group by rollup(key, value)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: t1
+            Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                keys: _col0 (type: int), _col1 (type: int), 0 (type: int)
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 9 Data size: 90 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int)
+                  sort order: +++
+                  Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: int)
+                  Statistics: Num rows: 9 Data size: 90 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Group By Operator
+          keys: KEY._col0 (type: int), KEY._col1 (type: int), KEY._col2 (type: int)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2
+          Statistics: Num rows: 4 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int), grouping(_col2, 0) (type: tinyint), grouping(_col2, 1) (type: tinyint)
+            outputColumnNames: _col0, _col1, _col2, _col3, _col4
+            Statistics: Num rows: 4 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 4 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select key, value, `grouping__id`, grouping(key), grouping(value)
+from T1
+group by rollup(key, value)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1
+#### A masked pattern was here ####
+POSTHOOK: query: select key, value, `grouping__id`, grouping(key), grouping(value)
+from T1
+group by rollup(key, value)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1
+#### A masked pattern was here ####
+NULL	NULL	0	0	0
+1	NULL	1	1	0
+1	NULL	3	1	1
+1	1	3	1	1
+2	NULL	1	1	0
+2	2	3	1	1
+3	NULL	1	1	0
+3	NULL	3	1	1
+3	3	3	1	1
+4	NULL	1	1	0
+4	5	3	1	1
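
In the rollup rows above, grouping__id equals grouping(key) + 2 * grouping(value):
grouping(key) is bit 0 and grouping(value) is bit 1 of the id, and a bit value of 1
means that column participates in the row's grouping set (the detail row 1/1 has both
bits set, hence id 3). A minimal HiveQL sketch that recomputes the id from the two
grouping() calls so the identity can be checked by eye; the alias recomputed_id is
illustrative, not from the patch:

  -- Sketch only: recompute grouping__id from the two grouping() bits.
  -- Every output row should show recomputed_id equal to grouping__id.
  select key, value, `grouping__id`,
         grouping(key) + 2 * grouping(value) as recomputed_id
  from T1
  group by rollup(key, value);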
+PREHOOK: query: explain
+select key, value, `grouping__id`, grouping(key), grouping(value)
+from T1
+group by cube(key, value)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select key, value, `grouping__id`, grouping(key), grouping(value)
+from T1
+group by cube(key, value)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: t1
+            Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                keys: _col0 (type: int), _col1 (type: int), 0 (type: int)
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 12 Data size: 120 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int)
+                  sort order: +++
+                  Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: int)
+                  Statistics: Num rows: 12 Data size: 120 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Group By Operator
+          keys: KEY._col0 (type: int), KEY._col1 (type: int), KEY._col2 (type: int)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2
+          Statistics: Num rows: 6 Data size: 60 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int), grouping(_col2, 0) (type: tinyint), grouping(_col2, 1) (type: tinyint)
+            outputColumnNames: _col0, _col1, _col2, _col3, _col4
+            Statistics: Num rows: 6 Data size: 60 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 6 Data size: 60 Basic stats: COMPLETE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select key, value, `grouping__id`, grouping(key), grouping(value)
+from T1
+group by cube(key, value)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1
+#### A masked pattern was here ####
+POSTHOOK: query: select key, value, `grouping__id`, grouping(key), grouping(value)
+from T1
+group by cube(key, value)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1
+#### A masked pattern was here ####
+NULL	NULL	0	0	0
+NULL	NULL	2	0	1
+NULL	1	2	0	1
+NULL	2	2	0	1
+NULL	3	2	0	1
+NULL	5	2	0	1
+1	NULL	1	1	0
+1	NULL	3	1	1
+1	1	3	1	1
+2	NULL	1	1	0
+2	2	3	1	1
+3	NULL	1	1	0
+3	NULL	3	1	1
+3	3	3	1	1
+4	NULL	1	1	0
+4	5	3	1	1
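
Compared with the rollup output earlier, the cube output adds the grouping sets in
which only value participates (the rows with grouping__id = 2). A hedged sketch of the
same query spelled out with explicit GROUPING SETS, showing which four sets
cube(key, value) expands to; dropping the (value) entry gives the rollup variant:

  -- Sketch only: cube(key, value) written as explicit grouping sets.
  -- The four sets correspond to grouping__id values 3, 1, 2 and 0 above.
  select key, value, `grouping__id`, grouping(key), grouping(value)
  from T1
  group by key, value grouping sets ((key, value), (key), (value), ());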
+PREHOOK: query: explain
+select key, value
+from T1
+group by cube(key, value)
+having grouping(key) = 1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select key, value
+from T1
+group by cube(key, value)
+having grouping(key) = 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: t1
+            Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                keys: _col0 (type: int), _col1 (type: int), 0 (type: int)
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 12 Data size: 120 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int)
+                  sort order: +++
+                  Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: int)
+                  Statistics: Num rows: 12 Data size: 120 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Group By Operator
+          keys: KEY._col0 (type: int), KEY._col1 (type: int), KEY._col2 (type: int)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2
+          Statistics: Num rows: 6 Data size: 60 Basic stats: COMPLETE Column stats: NONE
+          Filter Operator
+            predicate: (UDFToInteger(grouping(_col2, 0)) = 1) (type: boolean)
+            Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: _col0 (type: int), _col1 (type: int)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select key, value
+from T1
+group by cube(key, value)
+having grouping(key) = 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1
+#### A masked pattern was here ####
+POSTHOOK: query: select key, value
+from T1
+group by cube(key, value)
+having grouping(key) = 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1
+#### A masked pattern was here ####
+1	NULL
+1	NULL
+1	1
+2	NULL
+2	2
+3	NULL
+3	NULL
+3	3
+4	NULL
+4	5
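
The HAVING grouping(key) = 1 filter keeps only the grouping sets in which key itself
participates, which is why every row above carries a real key value; the duplicated
1/NULL and 3/NULL pairs survive because one copy comes from the (key) set and one from
the (key, value) set where value is NULL in the data. An illustrative rewrite of the
same filter through the grouping__id bit layout noted earlier, using a made-up inner
alias t and column gid (a sketch, not from the patch):

  -- Sketch only: filter on bit 0 of grouping__id instead of grouping(key).
  select key, value
  from (
    select key, value, `grouping__id` as gid
    from T1
    group by cube(key, value)
  ) t
  where (gid & 1) = 1;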
+PREHOOK: query: explain
+select key, value, grouping(key)+grouping(value) as x
+from T1
+group by cube(key, value)
+having grouping(key) = 1 OR grouping(value) = 1
+order by x desc, case when x = 1 then key end
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select key, value, grouping(key)+grouping(value) as x
+from T1
+group by cube(key, value)
+having grouping(key) = 1 OR grouping(value) = 1
+order by x desc, case when x = 1 then key end
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: t1
+            Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                keys: _col0 (type: int), _col1 (type: int), 0 (type: int)
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 12 Data size: 120 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int)
+                  sort order: +++
+                  Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: int)
+                  Statistics: Num rows: 12 Data size: 120 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Group By Operator
+          keys: KEY._col0 (type: int), KEY._col1 (type: int), KEY._col2 (type: int)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2
+          Statistics: Num rows: 6 Data size: 60 Basic stats: COMPLETE Column stats: NONE
+          Filter Operator
+            predicate: ((UDFToInteger(grouping(_col2, 0)) = 1) or (UDFToInteger(grouping(_col2, 1)) = 1)) (type: boolean)
+            Statistics: Num rows: 6 Data size: 60 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: _col0 (type: int), _col1 (type: int), (grouping(_col2, 0) + grouping(_col2, 1)) (type: tinyint)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 60 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Reduce Output Operator
+              key expressions: _col2 (type: tinyint), CASE WHEN ((_col2 = 1)) THEN (_col0) ELSE (null) END (type: int)
+              sort order: -+
+              Statistics: Num rows: 6 Data size: 60 Basic stats: COMPLETE Column stats: NONE
+              value expressions: _col0 (type: int), _col1 (type: int)
+      Reduce Operator Tree:
+        Select Operator
+          expressions: VALUE._col0 (type: int), VALUE._col1 (type: int), KEY.reducesinkkey0 (type: tinyint)
+          outputColumnNames: _col0, _col1, _col2
+          Statistics: Num rows: 6 Data size: 60 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 6 Data size: 60 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select key, value, grouping(key)+grouping(value) as x
+from T1
+group by cube(key, value)
+having grouping(key) = 1 OR grouping(value) = 1
+order by x desc, case when x = 1 then key end
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1
+#### A masked pattern was here ####
+POSTHOOK: query: select key, value, grouping(key)+grouping(value) as x
+from T1
+group by cube(key, value)
+having grouping(key) = 1 OR grouping(value) = 1
+order by x desc, case when x = 1 then key end
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1
+#### A masked pattern was here ####
+4	5	2
+3	3	2
+3	NULL	2
+2	2	2
+1	1	2
+1	NULL	2
+NULL	1	1
+NULL	NULL	1
+NULL	5	1
+NULL	3	1
+NULL	2	1
+1	NULL	1
+2	NULL	1
+3	NULL	1
+4	NULL	1
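
The ordering above follows from the two sort keys: x desc puts the detail rows (x = 2)
first, and within x = 1 the CASE expression is NULL for the value-only sets (key is
NULL there), so those rows come ahead of the key subtotals 1 through 4, matching the
order printed. A sketch of the same query with both sort keys materialized as output
columns so the tie-break values are visible; the alias tie_break is illustrative, not
from the patch:

  -- Sketch only: expose the sort keys used by the ORDER BY above.
  select key, value,
         grouping(key) + grouping(value) as x,
         case when grouping(key) + grouping(value) = 1 then key end as tie_break
  from T1
  group by cube(key, value)
  having grouping(key) = 1 or grouping(value) = 1
  order by x desc, tie_break;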
+PREHOOK: query: explain
+select key, value, `grouping__id`, grouping(key), grouping(value)
+from T1
+group by rollup(key, value)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select key, value, `grouping__id`, grouping(key), grouping(value)
+from T1
+group by rollup(key, value)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: t1
+            Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int)
+              outputColumnNames: key, value
+              Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                keys: key (type: int), value (type: int), 0 (type: int)
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 9 Data size: 90 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int)
+                  sort order: +++
+                  Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: int)
+                  Statistics: Num rows: 9 Data size: 90 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Group By Operator
+          keys: KEY._col0 (type: int), KEY._col1 (type: int), KEY._col2 (type: int)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2
+          Statistics: Num rows: 4 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int), grouping(_col2, 0) (type: tinyint), grouping(_col2, 1) (type: tinyint)
+            outputColumnNames: _col0, _col1, _col2, _col3, _col4
+            Statistics: Num rows: 4 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 4 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select key, value, `grouping__id`, grouping(key), grouping(value)
+from T1
+group by rollup(key, value)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1
+#### A masked pattern was here ####
+POSTHOOK: query: select key, value, `grouping__id`, grouping(key), grouping(value)
+from T1
+group by rollup(key, value)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1
+#### A masked pattern was here ####
+NULL	NULL	0	0	0
+1	NULL	1	1	0
+1	NULL	3	1	1
+1	1	3	1	1
+2	NULL	1	1	0
+2	2	3	1	1
+3	NULL	1	1	0
+3	NULL	3	1	1
+3	3	3	1	1
+4	NULL	1	1	0
+4	5	3	1	1
+PREHOOK: query: explain
+select key, value, `grouping__id`, grouping(key), grouping(value)
+from T1
+group by cube(key, value)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select key, value, `grouping__id`, grouping(key), grouping(value)
+from T1
+group by cube(key, value)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: t1
+            Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int)
+              outputColumnNames: key, value
+              Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                keys: key (type: int), value (type: int), 0 (type: int)
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 12 Data size: 120 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int)
+                  sort order: +++
+                  Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: int)
+                  Statistics: Num rows: 12 Data size: 120 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Group By Operator
+          keys: KEY._col0 (type: int), KEY._col1 (type: int), KEY._col2 (type: int)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2
+          Statistics: Num rows: 6 Data size: 60 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int), grouping(_col2, 0) (type: tinyint), grouping(_col2, 1) (type: tinyint)
+            outputColumnNames: _col0, _col1, _col2, _col3, _col4
+            Statistics: Num rows: 6 Data size: 60 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 6 Data size: 60 Basic stats: COMPLETE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select key, value, `grouping__id`, grouping(key), grouping(value)
+from T1
+group by cube(key, value)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1
+#### A masked pattern was here ####
+POSTHOOK: query: select key, value, `grouping__id`, grouping(key), grouping(value)
+from T1
+group by cube(key, value)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1
+#### A masked pattern was here ####
+NULL	NULL	0	0	0
+NULL	NULL	2	0	1
+NULL	1	2	0	1
+NULL	2	2	0	1
+NULL	3	2	0	1
+NULL	5	2	0	1
+1	NULL	1	1	0
+1	NULL	3	1	1
+1	1	3	1	1
+2	NULL	1	1	0
+2	2	3	1	1
+3	NULL	1	1	0
+3	NULL	3	1	1
+3	3	3	1	1
+4	NULL	1	1	0
+4	5	3	1	1
+PREHOOK: query: explain
+select key, value
+from T1
+group by cube(key, value)
+having grouping(key) = 1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select key, value
+from T1
+group by cube(key, value)
+having grouping(key) = 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: t1
+            Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int)
+              outputColumnNames: key, value
+              Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                keys: key (type: int), value (type: int), 0 (type: int)
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 12 Data size: 120 Basic stats: COMPLETE Column stats: NONE
+                Filter Operator
+                  predicate: (grouping(_col2, 0) = 1) (type: boolean)
+                  Statistics: Num rows: 6 Data size: 60 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int)
+                    sort order: +++
+                    Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: int)
+                    Statistics: Num rows: 6 Data size: 60 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Group By Operator
+          keys: KEY._col0 (type: int), KEY._col1 (type: int), KEY._col2 (type: int)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+          pruneGroupingSetId: true
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select key, value
+from T1
+group by cube(key, value)
+having grouping(key) = 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1
+#### A masked pattern was here ####
+POSTHOOK: query: select key, value
+from T1
+group by cube(key, value)
+having grouping(key) = 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1
+#### A masked pattern was here ####
+1	NULL
+1	NULL
+1	1
+2	NULL
+2	2
+3	NULL
+3	NULL
+3	3
+4	NULL
+4	5
+PREHOOK: query: explain
+select key, value, grouping(key)+grouping(value) as x
+from T1
+group by cube(key, value)
+having grouping(key) = 1 OR grouping(value) = 1
+order by x desc, case when x = 1 then key end
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select key, value, grouping(key)+grouping(value) as x
+from T1
+group by cube(key, value)
+having grouping(key) = 1 OR grouping(value) = 1
+order by x desc, case when x = 1 then key end
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: t1
+            Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int)
+              outputColumnNames: key, value
+              Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                keys: key (type: int), value (type: int), 0 (type: int)
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 12 Data size: 120 Basic stats: COMPLETE Column stats: NONE
+                Filter Operator
+                  predicate: ((grouping(_col2, 0) = 1) or (grouping(_col2, 1) = 1)) (type: boolean)
+                  Statistics: Num rows: 12 Data size: 120 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int)
+                    sort order: +++
+                    Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: int)
+                    Statistics: Num rows: 12 Data size: 120 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Group By Operator
+          keys: KEY._col0 (type: int), KEY._col1 (type: int), KEY._col2 (type: int)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2
+          Statistics: Num rows: 6 Data size: 60 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: _col0 (type: int), _col1 (type: int), (grouping(_col2, 0) + grouping(_col2, 1)) (type: tinyint)
+            outputColumnNames: _col0, _col1, _col2
+            Statistics: Num rows: 6 Data size: 60 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Reduce Output Operator
+              key expressions: _col2 (type: tinyint), CASE WHEN ((_col2 = 1)) THEN (_col0) END (type: int)
+              sort order: -+
+              Statistics: Num rows: 6 Data size: 60 Basic stats: COMPLETE Column stats: NONE
+              value expressions: _col0 (type: int), _col1 (type: int)
+      Reduce Operator Tree:
+        Select Operator
+          expressions: VALUE._col0 (type: int), VALUE._col1 (type: int), KEY.reducesinkkey0 (type: tinyint)
+          outputColumnNames: _col0, _col1, _col2
+          Statistics: Num rows: 6 Data size: 60 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 6 Data size: 60 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select key, value, grouping(key)+grouping(value) as x
+from T1
+group by cube(key, value)
+having grouping(key) = 1 OR grouping(value) = 1
+order by x desc, case when x = 1 then key end
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1
+#### A masked pattern was here ####
+POSTHOOK: query: select key, value, grouping(key)+grouping(value) as x
+from T1
+group by cube(key, value)
+having grouping(key) = 1 OR grouping(value) = 1
+order by x desc, case when x = 1 then key end
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1
+#### A masked pattern was here ####
+4	5	2
+3	3	2
+3	NULL	2
+2	2	2
+1	1	2
+1	NULL	2
+NULL	1	1
+NULL	NULL	1
+NULL	5	1
+NULL	3	1
+NULL	2	1
+1	NULL	1
+2	NULL	1
+3	NULL	1
+4	NULL	1