Posted to commits@hive.apache.org by ha...@apache.org on 2013/06/18 19:56:33 UTC

svn commit: r1494231 - in /hive/trunk/ql/src/test: queries/clientpositive/ results/clientpositive/

Author: hashutosh
Date: Tue Jun 18 17:56:32 2013
New Revision: 1494231

URL: http://svn.apache.org/r1494231
Log:
HIVE-4746 : Fix TestCliDriver.list_bucket_dml_{2,4,5,9,12,13}.q on 0.23 (Brock Noland via Ashutosh Chauhan)
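
The fix appends a trailing ORDER BY to every affected query so the rows in the golden .q.out files come back in a deterministic order, which otherwise can vary between Hadoop 0.20 and 0.23. A minimal sketch of the pattern, taken from the srcpart query in list_bucket_dml_2.q (the actual changes are in the diff below):

    -- Before: row order depends on the underlying file/split order,
    -- so the .q.out golden file may not match on Hadoop 0.23.
    select * from srcpart
    where ds = '2008-04-08' and key = '484' and value = 'val_484';

    -- After: an explicit ORDER BY makes the emitted rows deterministic,
    -- at the cost of an extra reduce stage in the plan (visible in the
    -- updated EXPLAIN EXTENDED output below).
    select * from srcpart
    where ds = '2008-04-08' and key = '484' and value = 'val_484'
    ORDER BY key, value;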

Modified:
    hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_12.q
    hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_13.q
    hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_2.q
    hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_4.q
    hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_5.q
    hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_9.q
    hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_12.q.out
    hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_13.q.out
    hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_2.q.out
    hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_4.q.out
    hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_5.q.out
    hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_9.q.out

Modified: hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_12.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_12.q?rev=1494231&r1=1494230&r2=1494231&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_12.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_12.q Tue Jun 18 17:56:32 2013
@@ -29,14 +29,14 @@ desc formatted list_bucketing_mul_col pa
 set hive.optimize.listbucketing=true;
 explain extended
 select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466";
+where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466" ORDER BY col2, col4, ds, hr;
 select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466";
+where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466" ORDER BY col2, col4, ds, hr;
 
 explain extended
 select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382";
+where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382" ORDER BY col2, col4, ds, hr;
 select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382";
+where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382" ORDER BY col2, col4, ds, hr;
 
 drop table list_bucketing_mul_col;

Modified: hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_13.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_13.q?rev=1494231&r1=1494230&r2=1494231&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_13.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_13.q Tue Jun 18 17:56:32 2013
@@ -29,8 +29,8 @@ desc formatted list_bucketing_mul_col pa
 set hive.optimize.listbucketing=true;
 explain extended
 select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466";
+where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466" ORDER BY col2, col4, ds, hr;
 select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466";
+where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466" ORDER BY col2, col4, ds, hr;
 
 drop table list_bucketing_mul_col;

Modified: hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_2.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_2.q?rev=1494231&r1=1494230&r2=1494231&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_2.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_2.q Tue Jun 18 17:56:32 2013
@@ -46,16 +46,16 @@ select count(*) from list_bucketing_stat
 set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
 set hive.optimize.listbucketing=true;
 explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484';
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484';
-select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484';
+select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr;
+select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr;
+select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484' ORDER BY key, value;
 
 -- 51 and val_51 in the table so skewed data for 51 and val_14 should be none
 -- but query should succeed for 51 or 51 and val_14
-select * from srcpart where ds = '2008-04-08' and key = '51';
-select * from list_bucketing_static_part where key = '51';
-select * from srcpart where ds = '2008-04-08' and key = '51' and value = 'val_14';
-select * from list_bucketing_static_part where key = '51' and value = 'val_14';
+select * from srcpart where ds = '2008-04-08' and key = '51' ORDER BY key, value;
+select * from list_bucketing_static_part where key = '51' ORDER BY key, value, ds, hr;
+select * from srcpart where ds = '2008-04-08' and key = '51' and value = 'val_14'  ORDER BY key, value;
+select * from list_bucketing_static_part where key = '51' and value = 'val_14' ORDER BY key, value, ds, hr;
 
 -- queries with < <= > >= should work for skewed test although we don't benefit from pruning
 select count(1) from srcpart where ds = '2008-04-08' and key < '51';

Modified: hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_4.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_4.q?rev=1494231&r1=1494230&r2=1494231&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_4.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_4.q Tue Jun 18 17:56:32 2013
@@ -63,9 +63,9 @@ select count(*) from list_bucketing_stat
 set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
 set hive.optimize.listbucketing=true;
 explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484';
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484';
-select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484';
+select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr;
+select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr;
+select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484' ORDER BY key, value;
 
 -- clean up
 drop table list_bucketing_static_part;

Modified: hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_5.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_5.q?rev=1494231&r1=1494230&r2=1494231&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_5.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_5.q Tue Jun 18 17:56:32 2013
@@ -28,11 +28,11 @@ desc formatted list_bucketing_dynamic_pa
 select count(1) from srcpart where ds='2008-04-08';
 select count(1) from list_bucketing_dynamic_part where ds='2008-04-08';
 
-select key, value from srcpart where ds='2008-04-08' and key = "103" and value ="val_103";
+select key, value from srcpart where ds='2008-04-08' and key = "103" and value ="val_103" ORDER BY key, value;
 set hive.optimize.listbucketing=true;
 explain extended
-select key, value from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103";
-select key, value from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103";
+select key, value, ds, hr from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103" ORDER BY key, value, ds, hr;
+select key, value, ds, hr from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103" ORDER BY key, value, ds, hr;
 
 -- clean up resources
 drop table list_bucketing_dynamic_part;

Modified: hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_9.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_9.q?rev=1494231&r1=1494230&r2=1494231&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_9.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_9.q Tue Jun 18 17:56:32 2013
@@ -63,9 +63,9 @@ select count(*) from list_bucketing_stat
 set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
 set hive.optimize.listbucketing=true;
 explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484';
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484';
-select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484';
+select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr;
+select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr;
+select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr;
 
 -- clean up
 drop table list_bucketing_static_part;

Modified: hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_12.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_12.q.out?rev=1494231&r1=1494230&r2=1494231&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_12.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_12.q.out Tue Jun 18 17:56:32 2013
@@ -247,11 +247,11 @@ Storage Desc Params:	 	 
 	serialization.format	1                   
 PREHOOK: query: explain extended
 select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466"
+where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466" ORDER BY col2, col4, ds, hr
 PREHOOK: type: QUERY
 POSTHOOK: query: explain extended
 select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466"
+where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466" ORDER BY col2, col4, ds, hr
 POSTHOOK: type: QUERY
 POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col1 EXPRESSION []
 POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col2 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
@@ -259,7 +259,7 @@ POSTHOOK: Lineage: list_bucketing_mul_co
 POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col4 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col5 EXPRESSION []
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME list_bucketing_mul_col))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (and (and (= (TOK_TABLE_OR_COL ds) '2008-04-08') (= (TOK_TABLE_OR_COL hr) '11')) (= (TOK_TABLE_OR_COL col2) "466")) (= (TOK_TABLE_OR_COL col4) "val_466")))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME list_bucketing_mul_col))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (and (and (= (TOK_TABLE_OR_COL ds) '2008-04-08') (= (TOK_TABLE_OR_COL hr) '11')) (= (TOK_TABLE_OR_COL col2) "466")) (= (TOK_TABLE_OR_COL col4) "val_466"))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL col2)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL col4)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ds)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL hr)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -295,23 +295,33 @@ STAGE PLANS:
                       expr: hr
                       type: string
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        columns _col0,_col1,_col2,_col3,_col4,_col5,_col6
-                        columns.types string:string:string:string:string:string:string
-                        escape.delim \
-                        serialization.format 1
-                  TotalFiles: 1
-                  GatherStats: false
-                  MultiFileSpray: false
+                Reduce Output Operator
+                  key expressions:
+                        expr: _col1
+                        type: string
+                        expr: _col3
+                        type: string
+                        expr: _col5
+                        type: string
+                        expr: _col6
+                        type: string
+                  sort order: ++++
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: string
+                        expr: _col1
+                        type: string
+                        expr: _col2
+                        type: string
+                        expr: _col3
+                        type: string
+                        expr: _col4
+                        type: string
+                        expr: _col5
+                        type: string
+                        expr: _col6
+                        type: string
       Needs Tagging: false
       Path -> Alias:
 #### A masked pattern was here ####
@@ -362,6 +372,25 @@ STAGE PLANS:
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_mul_col
             name: default.list_bucketing_mul_col
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+#### A masked pattern was here ####
+            NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                properties:
+                  columns _col0,_col1,_col2,_col3,_col4,_col5,_col6
+                  columns.types string:string:string:string:string:string:string
+                  escape.delim \
+                  serialization.format 1
+            TotalFiles: 1
+            GatherStats: false
+            MultiFileSpray: false
       Truncated Path -> Alias:
         /list_bucketing_mul_col/ds=2008-04-08/hr=11/col2=466/col4=val_466 [list_bucketing_mul_col]
 
@@ -371,13 +400,13 @@ STAGE PLANS:
 
 
 PREHOOK: query: select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466"
+where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466" ORDER BY col2, col4, ds, hr
 PREHOOK: type: QUERY
 PREHOOK: Input: default@list_bucketing_mul_col
 PREHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
 #### A masked pattern was here ####
 POSTHOOK: query: select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466"
+where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466" ORDER BY col2, col4, ds, hr
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@list_bucketing_mul_col
 POSTHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
@@ -392,11 +421,11 @@ POSTHOOK: Lineage: list_bucketing_mul_co
 1	466	1	val_466	1	2008-04-08	11
 PREHOOK: query: explain extended
 select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382"
+where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382" ORDER BY col2, col4, ds, hr
 PREHOOK: type: QUERY
 POSTHOOK: query: explain extended
 select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382"
+where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382" ORDER BY col2, col4, ds, hr
 POSTHOOK: type: QUERY
 POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col1 EXPRESSION []
 POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col2 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
@@ -404,7 +433,7 @@ POSTHOOK: Lineage: list_bucketing_mul_co
 POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col4 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col5 EXPRESSION []
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME list_bucketing_mul_col))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (and (and (= (TOK_TABLE_OR_COL ds) '2008-04-08') (= (TOK_TABLE_OR_COL hr) '11')) (= (TOK_TABLE_OR_COL col2) "382")) (= (TOK_TABLE_OR_COL col4) "val_382")))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME list_bucketing_mul_col))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (and (and (= (TOK_TABLE_OR_COL ds) '2008-04-08') (= (TOK_TABLE_OR_COL hr) '11')) (= (TOK_TABLE_OR_COL col2) "382")) (= (TOK_TABLE_OR_COL col4) "val_382"))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL col2)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL col4)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ds)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL hr)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -440,23 +469,33 @@ STAGE PLANS:
                       expr: hr
                       type: string
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        columns _col0,_col1,_col2,_col3,_col4,_col5,_col6
-                        columns.types string:string:string:string:string:string:string
-                        escape.delim \
-                        serialization.format 1
-                  TotalFiles: 1
-                  GatherStats: false
-                  MultiFileSpray: false
+                Reduce Output Operator
+                  key expressions:
+                        expr: _col1
+                        type: string
+                        expr: _col3
+                        type: string
+                        expr: _col5
+                        type: string
+                        expr: _col6
+                        type: string
+                  sort order: ++++
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: string
+                        expr: _col1
+                        type: string
+                        expr: _col2
+                        type: string
+                        expr: _col3
+                        type: string
+                        expr: _col4
+                        type: string
+                        expr: _col5
+                        type: string
+                        expr: _col6
+                        type: string
       Needs Tagging: false
       Path -> Alias:
 #### A masked pattern was here ####
@@ -507,6 +546,25 @@ STAGE PLANS:
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_mul_col
             name: default.list_bucketing_mul_col
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+#### A masked pattern was here ####
+            NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                properties:
+                  columns _col0,_col1,_col2,_col3,_col4,_col5,_col6
+                  columns.types string:string:string:string:string:string:string
+                  escape.delim \
+                  serialization.format 1
+            TotalFiles: 1
+            GatherStats: false
+            MultiFileSpray: false
       Truncated Path -> Alias:
         /list_bucketing_mul_col/ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME [list_bucketing_mul_col]
 
@@ -516,13 +574,13 @@ STAGE PLANS:
 
 
 PREHOOK: query: select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382"
+where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382" ORDER BY col2, col4, ds, hr
 PREHOOK: type: QUERY
 PREHOOK: Input: default@list_bucketing_mul_col
 PREHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
 #### A masked pattern was here ####
 POSTHOOK: query: select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382"
+where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382" ORDER BY col2, col4, ds, hr
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@list_bucketing_mul_col
 POSTHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=11

Modified: hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_13.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_13.q.out?rev=1494231&r1=1494230&r2=1494231&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_13.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_13.q.out Tue Jun 18 17:56:32 2013
@@ -247,11 +247,11 @@ Storage Desc Params:	 	 
 	serialization.format	1                   
 PREHOOK: query: explain extended
 select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466"
+where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466" ORDER BY col2, col4, ds, hr
 PREHOOK: type: QUERY
 POSTHOOK: query: explain extended
 select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466"
+where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466" ORDER BY col2, col4, ds, hr
 POSTHOOK: type: QUERY
 POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=2013-01-23+18:00:99).col1 EXPRESSION []
 POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=2013-01-23+18:00:99).col2 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
@@ -259,7 +259,7 @@ POSTHOOK: Lineage: list_bucketing_mul_co
 POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=2013-01-23+18:00:99).col4 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=2013-01-23+18:00:99).col5 EXPRESSION []
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME list_bucketing_mul_col))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (and (and (= (TOK_TABLE_OR_COL ds) '2008-04-08') (= (TOK_TABLE_OR_COL hr) '2013-01-23+18:00:99')) (= (TOK_TABLE_OR_COL col2) "466")) (= (TOK_TABLE_OR_COL col4) "val_466")))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME list_bucketing_mul_col))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (and (and (= (TOK_TABLE_OR_COL ds) '2008-04-08') (= (TOK_TABLE_OR_COL hr) '2013-01-23+18:00:99')) (= (TOK_TABLE_OR_COL col2) "466")) (= (TOK_TABLE_OR_COL col4) "val_466"))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL col2)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL col4)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ds)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL hr)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -295,23 +295,33 @@ STAGE PLANS:
                       expr: hr
                       type: string
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        columns _col0,_col1,_col2,_col3,_col4,_col5,_col6
-                        columns.types string:string:string:string:string:string:string
-                        escape.delim \
-                        serialization.format 1
-                  TotalFiles: 1
-                  GatherStats: false
-                  MultiFileSpray: false
+                Reduce Output Operator
+                  key expressions:
+                        expr: _col1
+                        type: string
+                        expr: _col3
+                        type: string
+                        expr: _col5
+                        type: string
+                        expr: _col6
+                        type: string
+                  sort order: ++++
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: string
+                        expr: _col1
+                        type: string
+                        expr: _col2
+                        type: string
+                        expr: _col3
+                        type: string
+                        expr: _col4
+                        type: string
+                        expr: _col5
+                        type: string
+                        expr: _col6
+                        type: string
       Needs Tagging: false
       Path -> Alias:
 #### A masked pattern was here ####
@@ -362,6 +372,25 @@ STAGE PLANS:
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_mul_col
             name: default.list_bucketing_mul_col
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+#### A masked pattern was here ####
+            NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                properties:
+                  columns _col0,_col1,_col2,_col3,_col4,_col5,_col6
+                  columns.types string:string:string:string:string:string:string
+                  escape.delim \
+                  serialization.format 1
+            TotalFiles: 1
+            GatherStats: false
+            MultiFileSpray: false
       Truncated Path -> Alias:
         /list_bucketing_mul_col/ds=2008-04-08/hr=2013-01-23+18%3A00%3A99/col2=466/col4=val_466 [list_bucketing_mul_col]
 
@@ -371,13 +400,13 @@ STAGE PLANS:
 
 
 PREHOOK: query: select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466"
+where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466" ORDER BY col2, col4, ds, hr
 PREHOOK: type: QUERY
 PREHOOK: Input: default@list_bucketing_mul_col
 PREHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=2013-01-23+18%3A00%3A99
 #### A masked pattern was here ####
 POSTHOOK: query: select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466"
+where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466" ORDER BY col2, col4, ds, hr
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@list_bucketing_mul_col
 POSTHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=2013-01-23+18%3A00%3A99

Modified: hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_2.q.out?rev=1494231&r1=1494230&r2=1494231&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_2.q.out Tue Jun 18 17:56:32 2013
@@ -329,15 +329,15 @@ POSTHOOK: Lineage: list_bucketing_static
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 1000
 PREHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr
 PREHOOK: type: QUERY
 POSTHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr
 POSTHOOK: type: QUERY
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME list_bucketing_static_part))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (and (and (= (TOK_TABLE_OR_COL ds) '2008-04-08') (= (TOK_TABLE_OR_COL hr) '11')) (= (TOK_TABLE_OR_COL key) '484')) (= (TOK_TABLE_OR_COL value) 'val_484')))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME list_bucketing_static_part))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (and (and (= (TOK_TABLE_OR_COL ds) '2008-04-08') (= (TOK_TABLE_OR_COL hr) '11')) (= (TOK_TABLE_OR_COL key) '484')) (= (TOK_TABLE_OR_COL value) 'val_484'))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ds)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL hr)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -367,23 +367,27 @@ STAGE PLANS:
                       expr: hr
                       type: string
                 outputColumnNames: _col0, _col1, _col2, _col3
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        columns _col0,_col1,_col2,_col3
-                        columns.types string:string:string:string
-                        escape.delim \
-                        serialization.format 1
-                  TotalFiles: 1
-                  GatherStats: false
-                  MultiFileSpray: false
+                Reduce Output Operator
+                  key expressions:
+                        expr: _col0
+                        type: string
+                        expr: _col1
+                        type: string
+                        expr: _col2
+                        type: string
+                        expr: _col3
+                        type: string
+                  sort order: ++++
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: string
+                        expr: _col1
+                        type: string
+                        expr: _col2
+                        type: string
+                        expr: _col3
+                        type: string
       Needs Tagging: false
       Path -> Alias:
 #### A masked pattern was here ####
@@ -434,6 +438,25 @@ STAGE PLANS:
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_static_part
             name: default.list_bucketing_static_part
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+#### A masked pattern was here ####
+            NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                properties:
+                  columns _col0,_col1,_col2,_col3
+                  columns.types string:string:string:string
+                  escape.delim \
+                  serialization.format 1
+            TotalFiles: 1
+            GatherStats: false
+            MultiFileSpray: false
       Truncated Path -> Alias:
         /list_bucketing_static_part/ds=2008-04-08/hr=11/key=484/value=val_484 [list_bucketing_static_part]
 
@@ -442,12 +465,12 @@ STAGE PLANS:
       limit: -1
 
 
-PREHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+PREHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr
 PREHOOK: type: QUERY
 PREHOOK: Input: default@list_bucketing_static_part
 PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
 #### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+POSTHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@list_bucketing_static_part
 POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
@@ -456,13 +479,13 @@ POSTHOOK: Lineage: list_bucketing_static
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 484	val_484	2008-04-08	11
 484	val_484	2008-04-08	11
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
+PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484' ORDER BY key, value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@srcpart
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
 #### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
+POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484' ORDER BY key, value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
@@ -470,11 +493,11 @@ POSTHOOK: Input: default@srcpart@ds=2008
 #### A masked pattern was here ####
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-484	val_484	2008-04-08	12
 484	val_484	2008-04-08	11
+484	val_484	2008-04-08	12
 PREHOOK: query: -- 51 and val_51 in the table so skewed data for 51 and val_14 should be none
 -- but query should succeed for 51 or 51 and val_14
-select * from srcpart where ds = '2008-04-08' and key = '51'
+select * from srcpart where ds = '2008-04-08' and key = '51' ORDER BY key, value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@srcpart
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
@@ -482,7 +505,7 @@ PREHOOK: Input: default@srcpart@ds=2008-
 #### A masked pattern was here ####
 POSTHOOK: query: -- 51 and val_51 in the table so skewed data for 51 and val_14 should be none
 -- but query should succeed for 51 or 51 and val_14
-select * from srcpart where ds = '2008-04-08' and key = '51'
+select * from srcpart where ds = '2008-04-08' and key = '51' ORDER BY key, value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
@@ -490,16 +513,16 @@ POSTHOOK: Input: default@srcpart@ds=2008
 #### A masked pattern was here ####
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-51	val_51	2008-04-08	12
-51	val_51	2008-04-08	12
 51	val_51	2008-04-08	11
 51	val_51	2008-04-08	11
-PREHOOK: query: select * from list_bucketing_static_part where key = '51'
+51	val_51	2008-04-08	12
+51	val_51	2008-04-08	12
+PREHOOK: query: select * from list_bucketing_static_part where key = '51' ORDER BY key, value, ds, hr
 PREHOOK: type: QUERY
 PREHOOK: Input: default@list_bucketing_static_part
 PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
 #### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where key = '51'
+POSTHOOK: query: select * from list_bucketing_static_part where key = '51' ORDER BY key, value, ds, hr
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@list_bucketing_static_part
 POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
@@ -510,13 +533,13 @@ POSTHOOK: Lineage: list_bucketing_static
 51	val_51	2008-04-08	11
 51	val_51	2008-04-08	11
 51	val_51	2008-04-08	11
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '51' and value = 'val_14'
+PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '51' and value = 'val_14'  ORDER BY key, value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@srcpart
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
 #### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '51' and value = 'val_14'
+POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '51' and value = 'val_14'  ORDER BY key, value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
@@ -524,12 +547,12 @@ POSTHOOK: Input: default@srcpart@ds=2008
 #### A masked pattern was here ####
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: select * from list_bucketing_static_part where key = '51' and value = 'val_14'
+PREHOOK: query: select * from list_bucketing_static_part where key = '51' and value = 'val_14' ORDER BY key, value, ds, hr
 PREHOOK: type: QUERY
 PREHOOK: Input: default@list_bucketing_static_part
 PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
 #### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where key = '51' and value = 'val_14'
+POSTHOOK: query: select * from list_bucketing_static_part where key = '51' and value = 'val_14' ORDER BY key, value, ds, hr
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@list_bucketing_static_part
 POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11

Modified: hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_4.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_4.q.out?rev=1494231&r1=1494230&r2=1494231&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_4.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_4.q.out Tue Jun 18 17:56:32 2013
@@ -738,17 +738,17 @@ POSTHOOK: Lineage: list_bucketing_static
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 1000
 PREHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr
 PREHOOK: type: QUERY
 POSTHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr
 POSTHOOK: type: QUERY
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME list_bucketing_static_part))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (and (and (= (TOK_TABLE_OR_COL ds) '2008-04-08') (= (TOK_TABLE_OR_COL hr) '11')) (= (TOK_TABLE_OR_COL key) '484')) (= (TOK_TABLE_OR_COL value) 'val_484')))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME list_bucketing_static_part))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (and (and (= (TOK_TABLE_OR_COL ds) '2008-04-08') (= (TOK_TABLE_OR_COL hr) '11')) (= (TOK_TABLE_OR_COL key) '484')) (= (TOK_TABLE_OR_COL value) 'val_484'))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ds)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL hr)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -778,23 +778,27 @@ STAGE PLANS:
                       expr: hr
                       type: string
                 outputColumnNames: _col0, _col1, _col2, _col3
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        columns _col0,_col1,_col2,_col3
-                        columns.types string:string:string:string
-                        escape.delim \
-                        serialization.format 1
-                  TotalFiles: 1
-                  GatherStats: false
-                  MultiFileSpray: false
+                Reduce Output Operator
+                  key expressions:
+                        expr: _col0
+                        type: string
+                        expr: _col1
+                        type: string
+                        expr: _col2
+                        type: string
+                        expr: _col3
+                        type: string
+                  sort order: ++++
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: string
+                        expr: _col1
+                        type: string
+                        expr: _col2
+                        type: string
+                        expr: _col3
+                        type: string
       Needs Tagging: false
       Path -> Alias:
 #### A masked pattern was here ####
@@ -845,6 +849,25 @@ STAGE PLANS:
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_static_part
             name: default.list_bucketing_static_part
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+#### A masked pattern was here ####
+            NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                properties:
+                  columns _col0,_col1,_col2,_col3
+                  columns.types string:string:string:string
+                  escape.delim \
+                  serialization.format 1
+            TotalFiles: 1
+            GatherStats: false
+            MultiFileSpray: false
       Truncated Path -> Alias:
         /list_bucketing_static_part/ds=2008-04-08/hr=11/key=484/value=val_484 [list_bucketing_static_part]
 
@@ -853,12 +876,12 @@ STAGE PLANS:
       limit: -1
 
 
-PREHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+PREHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr
 PREHOOK: type: QUERY
 PREHOOK: Input: default@list_bucketing_static_part
 PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
 #### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+POSTHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@list_bucketing_static_part
 POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
@@ -869,13 +892,13 @@ POSTHOOK: Lineage: list_bucketing_static
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 484	val_484	2008-04-08	11
 484	val_484	2008-04-08	11
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
+PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484' ORDER BY key, value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@srcpart
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
 #### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
+POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484' ORDER BY key, value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
@@ -885,8 +908,8 @@ POSTHOOK: Lineage: list_bucketing_static
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-484	val_484	2008-04-08	12
 484	val_484	2008-04-08	11
+484	val_484	2008-04-08	12
 PREHOOK: query: -- clean up
 drop table list_bucketing_static_part
 PREHOOK: type: DROPTABLE

Modified: hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_5.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_5.q.out?rev=1494231&r1=1494230&r2=1494231&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_5.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_5.q.out Tue Jun 18 17:56:32 2013
@@ -358,13 +358,13 @@ POSTHOOK: Lineage: list_bucketing_dynami
 POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 1000
-PREHOOK: query: select key, value from srcpart where ds='2008-04-08' and key = "103" and value ="val_103"
+PREHOOK: query: select key, value from srcpart where ds='2008-04-08' and key = "103" and value ="val_103" ORDER BY key, value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@srcpart
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
 #### A masked pattern was here ####
-POSTHOOK: query: select key, value from srcpart where ds='2008-04-08' and key = "103" and value ="val_103"
+POSTHOOK: query: select key, value from srcpart where ds='2008-04-08' and key = "103" and value ="val_103" ORDER BY key, value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
@@ -379,17 +379,17 @@ POSTHOOK: Lineage: list_bucketing_dynami
 103	val_103
 103	val_103
 PREHOOK: query: explain extended
-select key, value from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103"
+select key, value, ds, hr from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103" ORDER BY key, value, ds, hr
 PREHOOK: type: QUERY
 POSTHOOK: query: explain extended
-select key, value from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103"
+select key, value, ds, hr from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103" ORDER BY key, value, ds, hr
 POSTHOOK: type: QUERY
 POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME list_bucketing_dynamic_part))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (and (and (= (TOK_TABLE_OR_COL ds) '2008-04-08') (= (TOK_TABLE_OR_COL key) "103")) (= (TOK_TABLE_OR_COL value) "val_103")))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME list_bucketing_dynamic_part))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)) (TOK_SELEXPR (TOK_TABLE_OR_COL ds)) (TOK_SELEXPR (TOK_TABLE_OR_COL hr))) (TOK_WHERE (and (and (= (TOK_TABLE_OR_COL ds) '2008-04-08') (= (TOK_TABLE_OR_COL key) "103")) (= (TOK_TABLE_OR_COL value) "val_103"))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ds)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL hr)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -414,24 +414,32 @@ STAGE PLANS:
                       type: string
                       expr: value
                       type: string
-                outputColumnNames: _col0, _col1
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        columns _col0,_col1
-                        columns.types string:string
-                        escape.delim \
-                        serialization.format 1
-                  TotalFiles: 1
-                  GatherStats: false
-                  MultiFileSpray: false
+                      expr: ds
+                      type: string
+                      expr: hr
+                      type: string
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Reduce Output Operator
+                  key expressions:
+                        expr: _col0
+                        type: string
+                        expr: _col1
+                        type: string
+                        expr: _col2
+                        type: string
+                        expr: _col3
+                        type: string
+                  sort order: ++++
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: string
+                        expr: _col1
+                        type: string
+                        expr: _col2
+                        type: string
+                        expr: _col3
+                        type: string
       Needs Tagging: false
       Path -> Alias:
 #### A masked pattern was here ####
@@ -528,6 +536,25 @@ STAGE PLANS:
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.list_bucketing_dynamic_part
             name: default.list_bucketing_dynamic_part
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+#### A masked pattern was here ####
+            NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                properties:
+                  columns _col0,_col1,_col2,_col3
+                  columns.types string:string:string:string
+                  escape.delim \
+                  serialization.format 1
+            TotalFiles: 1
+            GatherStats: false
+            MultiFileSpray: false
       Truncated Path -> Alias:
         /list_bucketing_dynamic_part/ds=2008-04-08/hr=11/key=103/value=val_103 [list_bucketing_dynamic_part]
         /list_bucketing_dynamic_part/ds=2008-04-08/hr=12/key=103/value=val_103 [list_bucketing_dynamic_part]
@@ -537,13 +564,13 @@ STAGE PLANS:
       limit: -1
 
 
-PREHOOK: query: select key, value from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103"
+PREHOOK: query: select key, value, ds, hr from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103" ORDER BY key, value, ds, hr
 PREHOOK: type: QUERY
 PREHOOK: Input: default@list_bucketing_dynamic_part
 PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=11
 PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=12
 #### A masked pattern was here ####
-POSTHOOK: query: select key, value from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103"
+POSTHOOK: query: select key, value, ds, hr from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103" ORDER BY key, value, ds, hr
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@list_bucketing_dynamic_part
 POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=11
@@ -553,10 +580,10 @@ POSTHOOK: Lineage: list_bucketing_dynami
 POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-103	val_103
-103	val_103
-103	val_103
-103	val_103
+103	val_103	2008-04-08	11
+103	val_103	2008-04-08	11
+103	val_103	2008-04-08	12
+103	val_103	2008-04-08	12
 PREHOOK: query: -- clean up resources
 drop table list_bucketing_dynamic_part
 PREHOOK: type: DROPTABLE

Modified: hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_9.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_9.q.out?rev=1494231&r1=1494230&r2=1494231&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_9.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_9.q.out Tue Jun 18 17:56:32 2013
@@ -738,17 +738,17 @@ POSTHOOK: Lineage: list_bucketing_static
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 1000
 PREHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr
 PREHOOK: type: QUERY
 POSTHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr
 POSTHOOK: type: QUERY
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME list_bucketing_static_part))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (and (and (= (TOK_TABLE_OR_COL ds) '2008-04-08') (= (TOK_TABLE_OR_COL hr) '11')) (= (TOK_TABLE_OR_COL key) '484')) (= (TOK_TABLE_OR_COL value) 'val_484')))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME list_bucketing_static_part))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (and (and (= (TOK_TABLE_OR_COL ds) '2008-04-08') (= (TOK_TABLE_OR_COL hr) '11')) (= (TOK_TABLE_OR_COL key) '484')) (= (TOK_TABLE_OR_COL value) 'val_484'))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ds)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL hr)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -778,23 +778,27 @@ STAGE PLANS:
                       expr: hr
                       type: string
                 outputColumnNames: _col0, _col1, _col2, _col3
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        columns _col0,_col1,_col2,_col3
-                        columns.types string:string:string:string
-                        escape.delim \
-                        serialization.format 1
-                  TotalFiles: 1
-                  GatherStats: false
-                  MultiFileSpray: false
+                Reduce Output Operator
+                  key expressions:
+                        expr: _col0
+                        type: string
+                        expr: _col1
+                        type: string
+                        expr: _col2
+                        type: string
+                        expr: _col3
+                        type: string
+                  sort order: ++++
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: string
+                        expr: _col1
+                        type: string
+                        expr: _col2
+                        type: string
+                        expr: _col3
+                        type: string
       Needs Tagging: false
       Path -> Alias:
 #### A masked pattern was here ####
@@ -845,6 +849,25 @@ STAGE PLANS:
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_static_part
             name: default.list_bucketing_static_part
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+#### A masked pattern was here ####
+            NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                properties:
+                  columns _col0,_col1,_col2,_col3
+                  columns.types string:string:string:string
+                  escape.delim \
+                  serialization.format 1
+            TotalFiles: 1
+            GatherStats: false
+            MultiFileSpray: false
       Truncated Path -> Alias:
         /list_bucketing_static_part/ds=2008-04-08/hr=11/key=484 [list_bucketing_static_part]
 
@@ -853,12 +876,12 @@ STAGE PLANS:
       limit: -1
 
 
-PREHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+PREHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr
 PREHOOK: type: QUERY
 PREHOOK: Input: default@list_bucketing_static_part
 PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
 #### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+POSTHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@list_bucketing_static_part
 POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
@@ -869,13 +892,13 @@ POSTHOOK: Lineage: list_bucketing_static
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 484	val_484	2008-04-08	11
 484	val_484	2008-04-08	11
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
+PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr
 PREHOOK: type: QUERY
 PREHOOK: Input: default@srcpart
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
 #### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
+POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484' ORDER BY key, value, ds, hr
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
@@ -885,8 +908,8 @@ POSTHOOK: Lineage: list_bucketing_static
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-484	val_484	2008-04-08	12
 484	val_484	2008-04-08	11
+484	val_484	2008-04-08	12
 PREHOOK: query: -- clean up
 drop table list_bucketing_static_part
 PREHOOK: type: DROPTABLE