Posted to commits@hive.apache.org by br...@apache.org on 2014/11/29 04:44:28 UTC

svn commit: r1642395 [7/22] - in /hive/branches/spark/ql/src: java/org/apache/hadoop/hive/ql/exec/spark/ java/org/apache/hadoop/hive/ql/exec/spark/session/ test/results/clientpositive/ test/results/clientpositive/spark/

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join21.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join21.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join21.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join21.q.out Sat Nov 29 03:44:22 2014
@@ -17,6 +17,26 @@ STAGE PLANS:
         Map 1 
             Map Operator Tree:
                 TableScan
+                  alias: src1
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Spark HashTable Sink Operator
+                    condition expressions:
+                      0 {value}
+                      1 {key} {value}
+                      2 {key} {value}
+                    filter predicates:
+                      0 {(key < 10)}
+                      1 
+                      2 {(key < 10)}
+                    keys:
+                      0 key (type: string)
+                      1 key (type: string)
+                      2 key (type: string)
+            Local Work:
+              Map Reduce Local Work
+        Map 2 
+            Map Operator Tree:
+                TableScan
                   alias: src2
                   Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
@@ -37,34 +57,14 @@ STAGE PLANS:
                         2 key (type: string)
             Local Work:
               Map Reduce Local Work
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: src1
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Spark HashTable Sink Operator
-                    condition expressions:
-                      0 {value}
-                      1 {key} {value}
-                      2 {key} {value}
-                    filter predicates:
-                      0 {(key < 10)}
-                      1 
-                      2 {(key < 10)}
-                    keys:
-                      0 key (type: string)
-                      1 key (type: string)
-                      2 key (type: string)
-            Local Work:
-              Map Reduce Local Work
 
   Stage: Stage-1
     Spark
       Edges:
-        Reducer 3 <- Map 2 (SORT, 1)
+        Reducer 4 <- Map 3 (SORT, 1)
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 3 
             Map Operator Tree:
                 TableScan
                   alias: src3
@@ -87,8 +87,8 @@ STAGE PLANS:
                       2 key (type: string)
                     outputColumnNames: _col0, _col1, _col5, _col6, _col10, _col11
                     input vertices:
-                      0 Map 4
-                      1 Map 1
+                      0 Map 1
+                      1 Map 2
                     Statistics: Num rows: 1100 Data size: 11686 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string), _col6 (type: string), _col10 (type: string), _col11 (type: string)
@@ -100,7 +100,7 @@ STAGE PLANS:
                         Statistics: Num rows: 1100 Data size: 11686 Basic stats: COMPLETE Column stats: NONE
             Local Work:
               Map Reduce Local Work
-        Reducer 3 
+        Reducer 4 
             Reduce Operator Tree:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string), KEY.reducesinkkey4 (type: string), KEY.reducesinkkey5 (type: string)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join22.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join22.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join22.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join22.q.out Sat Nov 29 03:44:22 2014
@@ -14,7 +14,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
                   alias: src2
@@ -53,7 +53,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: src1
@@ -72,7 +72,7 @@ STAGE PLANS:
                         1 key (type: string)
                       outputColumnNames: _col0, _col1
                       input vertices:
-                        1 Map 1
+                        1 Map 2
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: string), _col1 (type: string)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join23.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join23.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join23.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join23.q.out Sat Nov 29 03:44:22 2014
@@ -14,7 +14,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 3 
             Map Operator Tree:
                 TableScan
                   alias: src2
@@ -35,10 +35,10 @@ STAGE PLANS:
   Stage: Stage-1
     Spark
       Edges:
-        Reducer 3 <- Map 2 (SORT, 1)
+        Reducer 2 <- Map 1 (SORT, 1)
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: src1
@@ -57,7 +57,7 @@ STAGE PLANS:
                         1 
                       outputColumnNames: _col0, _col1, _col5, _col6
                       input vertices:
-                        1 Map 1
+                        1 Map 3
                       Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string), _col6 (type: string)
@@ -69,7 +69,7 @@ STAGE PLANS:
                           Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
             Local Work:
               Map Reduce Local Work
-        Reducer 3 
+        Reducer 2 
             Reduce Operator Tree:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join25.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join25.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join25.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join25.q.out Sat Nov 29 03:44:22 2014
@@ -31,7 +31,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: x
@@ -53,7 +53,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
                   alias: y
@@ -72,7 +72,7 @@ STAGE PLANS:
                         1 key (type: string)
                       outputColumnNames: _col0, _col1, _col6
                       input vertices:
-                        0 Map 2
+                        0 Map 1
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), _col6 (type: string)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join26.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join26.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/join26.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/join26.q.out Sat Nov 29 03:44:22 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join27.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join27.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join27.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join27.q.out Sat Nov 29 03:44:22 2014
@@ -31,7 +31,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: x
@@ -53,7 +53,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
                   alias: y
@@ -72,7 +72,7 @@ STAGE PLANS:
                         1 value (type: string)
                       outputColumnNames: _col0, _col1, _col6
                       input vertices:
-                        0 Map 2
+                        0 Map 1
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), _col6 (type: string)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join28.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join28.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join28.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join28.q.out Sat Nov 29 03:44:22 2014
@@ -101,35 +101,31 @@ STAGE PLANS:
                       input vertices:
                         0 Map 1
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-                      Select Operator
-                        expressions: _col0 (type: string)
-                        outputColumnNames: _col0
-                        Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-                        Map Join Operator
-                          condition map:
-                               Inner Join 0 to 1
-                          condition expressions:
-                            0 {_col0}
-                            1 {value}
-                          keys:
-                            0 _col0 (type: string)
-                            1 key (type: string)
-                          outputColumnNames: _col0, _col5
-                          input vertices:
-                            1 Map 3
+                      Map Join Operator
+                        condition map:
+                             Inner Join 0 to 1
+                        condition expressions:
+                          0 {_col0}
+                          1 {value}
+                        keys:
+                          0 _col0 (type: string)
+                          1 key (type: string)
+                        outputColumnNames: _col0, _col5
+                        input vertices:
+                          1 Map 3
+                        Statistics: Num rows: 302 Data size: 3213 Basic stats: COMPLETE Column stats: NONE
+                        Select Operator
+                          expressions: _col0 (type: string), _col5 (type: string)
+                          outputColumnNames: _col0, _col1
                           Statistics: Num rows: 302 Data size: 3213 Basic stats: COMPLETE Column stats: NONE
-                          Select Operator
-                            expressions: _col0 (type: string), _col5 (type: string)
-                            outputColumnNames: _col0, _col1
+                          File Output Operator
+                            compressed: false
                             Statistics: Num rows: 302 Data size: 3213 Basic stats: COMPLETE Column stats: NONE
-                            File Output Operator
-                              compressed: false
-                              Statistics: Num rows: 302 Data size: 3213 Basic stats: COMPLETE Column stats: NONE
-                              table:
-                                  input format: org.apache.hadoop.mapred.TextInputFormat
-                                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                                  name: default.dest_j1
+                            table:
+                                input format: org.apache.hadoop.mapred.TextInputFormat
+                                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                                name: default.dest_j1
             Local Work:
               Map Reduce Local Work
 

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join29.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join29.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join29.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join29.q.out Sat Nov 29 03:44:22 2014
@@ -36,10 +36,10 @@ STAGE PLANS:
   Stage: Stage-3
     Spark
       Edges:
-        Reducer 4 <- Map 3 (GROUP, 1)
+        Reducer 2 <- Map 1 (GROUP, 1)
 #### A masked pattern was here ####
       Vertices:
-        Map 3 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: x
@@ -47,23 +47,19 @@ STAGE PLANS:
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 13 Data size: 99 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string)
-                      outputColumnNames: key
+                    Group By Operator
+                      aggregations: count(1)
+                      keys: key (type: string)
+                      mode: hash
+                      outputColumnNames: _col0, _col1
                       Statistics: Num rows: 13 Data size: 99 Basic stats: COMPLETE Column stats: NONE
-                      Group By Operator
-                        aggregations: count(1)
-                        keys: key (type: string)
-                        mode: hash
-                        outputColumnNames: _col0, _col1
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 13 Data size: 99 Basic stats: COMPLETE Column stats: NONE
-                        Reduce Output Operator
-                          key expressions: _col0 (type: string)
-                          sort order: +
-                          Map-reduce partition columns: _col0 (type: string)
-                          Statistics: Num rows: 13 Data size: 99 Basic stats: COMPLETE Column stats: NONE
-                          value expressions: _col1 (type: bigint)
-        Reducer 4 
+                        value expressions: _col1 (type: bigint)
+        Reducer 2 
             Local Work:
               Map Reduce Local Work
             Reduce Operator Tree:
@@ -72,26 +68,22 @@ STAGE PLANS:
                 keys: KEY._col0 (type: string)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 6 Data size: 45 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col0 (type: string), _col1 (type: bigint)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 6 Data size: 45 Basic stats: COMPLETE Column stats: NONE
-                  Spark HashTable Sink Operator
-                    condition expressions:
-                      0 {_col1}
-                      1 {_col1}
-                    keys:
-                      0 _col0 (type: string)
-                      1 _col0 (type: string)
+                Statistics: Num rows: 13 Data size: 99 Basic stats: COMPLETE Column stats: NONE
+                Spark HashTable Sink Operator
+                  condition expressions:
+                    0 {_col1}
+                    1 {_col1}
+                  keys:
+                    0 _col0 (type: string)
+                    1 _col0 (type: string)
 
   Stage: Stage-1
     Spark
       Edges:
-        Reducer 2 <- Map 1 (GROUP, 1)
+        Reducer 4 <- Map 3 (GROUP, 1)
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 3 
             Map Operator Tree:
                 TableScan
                   alias: y
@@ -99,23 +91,19 @@ STAGE PLANS:
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string)
-                      outputColumnNames: key
+                    Group By Operator
+                      aggregations: count(1)
+                      keys: key (type: string)
+                      mode: hash
+                      outputColumnNames: _col0, _col1
                       Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-                      Group By Operator
-                        aggregations: count(1)
-                        keys: key (type: string)
-                        mode: hash
-                        outputColumnNames: _col0, _col1
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-                        Reduce Output Operator
-                          key expressions: _col0 (type: string)
-                          sort order: +
-                          Map-reduce partition columns: _col0 (type: string)
-                          Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-                          value expressions: _col1 (type: bigint)
-        Reducer 2 
+                        value expressions: _col1 (type: bigint)
+        Reducer 4 
             Local Work:
               Map Reduce Local Work
             Reduce Operator Tree:
@@ -124,36 +112,32 @@ STAGE PLANS:
                 keys: KEY._col0 (type: string)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col0 (type: string), _col1 (type: bigint)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
-                  Map Join Operator
-                    condition map:
-                         Inner Join 0 to 1
-                    condition expressions:
-                      0 {_col0} {_col1}
-                      1 {_col1}
-                    keys:
-                      0 _col0 (type: string)
-                      1 _col0 (type: string)
-                    outputColumnNames: _col0, _col1, _col3
-                    input vertices:
-                      0 Reducer 4
-                    Statistics: Num rows: 137 Data size: 1460 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: _col0 (type: string), UDFToInteger(_col1) (type: int), UDFToInteger(_col3) (type: int)
-                      outputColumnNames: _col0, _col1, _col2
-                      Statistics: Num rows: 137 Data size: 1460 Basic stats: COMPLETE Column stats: NONE
-                      File Output Operator
-                        compressed: false
-                        Statistics: Num rows: 137 Data size: 1460 Basic stats: COMPLETE Column stats: NONE
-                        table:
-                            input format: org.apache.hadoop.mapred.TextInputFormat
-                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                            name: default.dest_j1
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                Map Join Operator
+                  condition map:
+                       Inner Join 0 to 1
+                  condition expressions:
+                    0 {_col0} {_col1}
+                    1 {_col1}
+                  keys:
+                    0 _col0 (type: string)
+                    1 _col0 (type: string)
+                  outputColumnNames: _col0, _col1, _col3
+                  input vertices:
+                    0 Reducer 2
+                  Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: string), UDFToInteger(_col1) (type: int), UDFToInteger(_col3) (type: int)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          name: default.dest_j1
 
   Stage: Stage-0
     Move Operator

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join3.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join3.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join3.q.out Sat Nov 29 03:44:22 2014
@@ -29,7 +29,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
                   alias: src2
@@ -48,7 +48,7 @@ STAGE PLANS:
                         2 key (type: string)
             Local Work:
               Map Reduce Local Work
-        Map 2 
+        Map 3 
             Map Operator Tree:
                 TableScan
                   alias: src3
@@ -72,7 +72,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 3 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: src1
@@ -94,8 +94,8 @@ STAGE PLANS:
                         2 key (type: string)
                       outputColumnNames: _col0, _col11
                       input vertices:
-                        1 Map 1
-                        2 Map 2
+                        1 Map 2
+                        2 Map 3
                       Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: UDFToInteger(_col0) (type: int), _col11 (type: string)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join30.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join30.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join30.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join30.q.out Sat Nov 29 03:44:22 2014
@@ -29,7 +29,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 3 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: x
@@ -50,10 +50,10 @@ STAGE PLANS:
   Stage: Stage-1
     Spark
       Edges:
-        Reducer 2 <- Map 1 (GROUP, 1)
+        Reducer 3 <- Map 2 (GROUP, 1)
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
                   alias: y
@@ -72,27 +72,23 @@ STAGE PLANS:
                         1 key (type: string)
                       outputColumnNames: _col0
                       input vertices:
-                        0 Map 3
+                        0 Map 1
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-                      Select Operator
-                        expressions: _col0 (type: string)
-                        outputColumnNames: _col0
+                      Group By Operator
+                        aggregations: count(1)
+                        keys: _col0 (type: string)
+                        mode: hash
+                        outputColumnNames: _col0, _col1
                         Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-                        Group By Operator
-                          aggregations: count(1)
-                          keys: _col0 (type: string)
-                          mode: hash
-                          outputColumnNames: _col0, _col1
+                        Reduce Output Operator
+                          key expressions: _col0 (type: string)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: string)
                           Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-                          Reduce Output Operator
-                            key expressions: _col0 (type: string)
-                            sort order: +
-                            Map-reduce partition columns: _col0 (type: string)
-                            Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-                            value expressions: _col1 (type: bigint)
+                          value expressions: _col1 (type: bigint)
             Local Work:
               Map Reduce Local Work
-        Reducer 2 
+        Reducer 3 
             Reduce Operator Tree:
               Group By Operator
                 aggregations: count(VALUE._col0)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join31.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join31.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join31.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join31.q.out Sat Nov 29 03:44:22 2014
@@ -38,10 +38,10 @@ STAGE PLANS:
   Stage: Stage-3
     Spark
       Edges:
-        Reducer 5 <- Map 4 (GROUP, 1)
+        Reducer 2 <- Map 1 (GROUP, 1)
 #### A masked pattern was here ####
       Vertices:
-        Map 4 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: x
@@ -49,23 +49,19 @@ STAGE PLANS:
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 13 Data size: 99 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string)
-                      outputColumnNames: key
+                    Group By Operator
+                      aggregations: count(1)
+                      keys: key (type: string)
+                      mode: hash
+                      outputColumnNames: _col0, _col1
                       Statistics: Num rows: 13 Data size: 99 Basic stats: COMPLETE Column stats: NONE
-                      Group By Operator
-                        aggregations: count(1)
-                        keys: key (type: string)
-                        mode: hash
-                        outputColumnNames: _col0, _col1
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 13 Data size: 99 Basic stats: COMPLETE Column stats: NONE
-                        Reduce Output Operator
-                          key expressions: _col0 (type: string)
-                          sort order: +
-                          Map-reduce partition columns: _col0 (type: string)
-                          Statistics: Num rows: 13 Data size: 99 Basic stats: COMPLETE Column stats: NONE
-                          value expressions: _col1 (type: bigint)
-        Reducer 5 
+                        value expressions: _col1 (type: bigint)
+        Reducer 2 
             Local Work:
               Map Reduce Local Work
             Reduce Operator Tree:
@@ -90,11 +86,11 @@ STAGE PLANS:
   Stage: Stage-1
     Spark
       Edges:
-        Reducer 2 <- Map 1 (GROUP, 1)
-        Reducer 3 <- Reducer 2 (GROUP, 1)
+        Reducer 4 <- Map 3 (GROUP, 1)
+        Reducer 5 <- Reducer 4 (GROUP, 1)
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 3 
             Map Operator Tree:
                 TableScan
                   alias: y
@@ -102,23 +98,19 @@ STAGE PLANS:
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string)
-                      outputColumnNames: key
+                    Group By Operator
+                      aggregations: count(1)
+                      keys: key (type: string)
+                      mode: hash
+                      outputColumnNames: _col0, _col1
                       Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-                      Group By Operator
-                        aggregations: count(1)
-                        keys: key (type: string)
-                        mode: hash
-                        outputColumnNames: _col0, _col1
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-                        Reduce Output Operator
-                          key expressions: _col0 (type: string)
-                          sort order: +
-                          Map-reduce partition columns: _col0 (type: string)
-                          Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-                          value expressions: _col1 (type: bigint)
-        Reducer 2 
+                        value expressions: _col1 (type: bigint)
+        Reducer 4 
             Local Work:
               Map Reduce Local Work
             Reduce Operator Tree:
@@ -143,25 +135,21 @@ STAGE PLANS:
                       1 _col0 (type: string)
                     outputColumnNames: _col0
                     input vertices:
-                      0 Reducer 5
+                      0 Reducer 2
                     Statistics: Num rows: 137 Data size: 1460 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: _col0 (type: string)
-                      outputColumnNames: _col0
+                    Group By Operator
+                      aggregations: count(1)
+                      keys: _col0 (type: string)
+                      mode: hash
+                      outputColumnNames: _col0, _col1
                       Statistics: Num rows: 137 Data size: 1460 Basic stats: COMPLETE Column stats: NONE
-                      Group By Operator
-                        aggregations: count(1)
-                        keys: _col0 (type: string)
-                        mode: hash
-                        outputColumnNames: _col0, _col1
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 137 Data size: 1460 Basic stats: COMPLETE Column stats: NONE
-                        Reduce Output Operator
-                          key expressions: _col0 (type: string)
-                          sort order: +
-                          Map-reduce partition columns: _col0 (type: string)
-                          Statistics: Num rows: 137 Data size: 1460 Basic stats: COMPLETE Column stats: NONE
-                          value expressions: _col1 (type: bigint)
-        Reducer 3 
+                        value expressions: _col1 (type: bigint)
+        Reducer 5 
             Reduce Operator Tree:
               Group By Operator
                 aggregations: count(VALUE._col0)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join32.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join32.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/join32.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/join32.q.out Sat Nov 29 03:44:22 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out Sat Nov 29 03:44:22 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join33.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join33.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/join33.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/join33.q.out Sat Nov 29 03:44:22 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join35.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join35.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/join35.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/join35.q.out Sat Nov 29 03:44:22 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join36.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join36.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join36.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join36.q.out Sat Nov 29 03:44:22 2014
@@ -71,7 +71,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
                   alias: y
@@ -93,7 +93,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: x
@@ -112,7 +112,7 @@ STAGE PLANS:
                         1 key (type: int)
                       outputColumnNames: _col0, _col1, _col6
                       input vertices:
-                        1 Map 1
+                        1 Map 2
                       Statistics: Num rows: 170 Data size: 817 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: int), _col1 (type: int), _col6 (type: int)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join37.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join37.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join37.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join37.q.out Sat Nov 29 03:44:22 2014
@@ -31,7 +31,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: x
@@ -53,7 +53,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
                   alias: y
@@ -72,7 +72,7 @@ STAGE PLANS:
                         1 key (type: string)
                       outputColumnNames: _col0, _col1, _col6
                       input vertices:
-                        0 Map 2
+                        0 Map 1
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), _col6 (type: string)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join38.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join38.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join38.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join38.q.out Sat Nov 29 03:44:22 2014
@@ -58,7 +58,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 3 
             Map Operator Tree:
                 TableScan
                   alias: b
@@ -79,10 +79,10 @@ STAGE PLANS:
   Stage: Stage-1
     Spark
       Edges:
-        Reducer 3 <- Map 2 (GROUP, 3)
+        Reducer 2 <- Map 1 (GROUP, 3)
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: a
@@ -101,27 +101,23 @@ STAGE PLANS:
                         1 '111' (type: string)
                       outputColumnNames: _col1, _col10
                       input vertices:
-                        1 Map 1
+                        1 Map 3
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-                      Select Operator
-                        expressions: _col1 (type: string), _col10 (type: string)
-                        outputColumnNames: _col1, _col10
+                      Group By Operator
+                        aggregations: count(1)
+                        keys: _col1 (type: string), _col10 (type: string)
+                        mode: hash
+                        outputColumnNames: _col0, _col1, _col2
                         Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-                        Group By Operator
-                          aggregations: count(1)
-                          keys: _col1 (type: string), _col10 (type: string)
-                          mode: hash
-                          outputColumnNames: _col0, _col1, _col2
+                        Reduce Output Operator
+                          key expressions: _col0 (type: string), _col1 (type: string)
+                          sort order: ++
+                          Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                           Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-                          Reduce Output Operator
-                            key expressions: _col0 (type: string), _col1 (type: string)
-                            sort order: ++
-                            Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
-                            Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-                            value expressions: _col2 (type: bigint)
+                          value expressions: _col2 (type: bigint)
             Local Work:
               Map Reduce Local Work
-        Reducer 3 
+        Reducer 2 
             Reduce Operator Tree:
               Group By Operator
                 aggregations: count(VALUE._col0)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join39.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join39.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join39.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join39.q.out Sat Nov 29 03:44:22 2014
@@ -31,7 +31,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: src
@@ -57,7 +57,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
                   alias: x
@@ -73,7 +73,7 @@ STAGE PLANS:
                       1 _col0 (type: string)
                     outputColumnNames: _col0, _col1, _col5, _col6
                     input vertices:
-                      1 Map 2
+                      1 Map 1
                     Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string), _col6 (type: string)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join40.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join40.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join40.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join40.q.out Sat Nov 29 03:44:22 2014
@@ -18,7 +18,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: src
@@ -44,7 +44,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
                   alias: x
@@ -60,7 +60,7 @@ STAGE PLANS:
                       1 _col0 (type: string)
                     outputColumnNames: _col0, _col1, _col5, _col6
                     input vertices:
-                      1 Map 2
+                      1 Map 1
                     Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string), _col6 (type: string)
@@ -674,7 +674,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
                   alias: src2
@@ -696,7 +696,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: src1
@@ -715,7 +715,7 @@ STAGE PLANS:
                         1 key (type: string)
                       outputColumnNames: _col0, _col6
                       input vertices:
-                        1 Map 1
+                        1 Map 2
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: string), _col6 (type: string)
@@ -1796,15 +1796,15 @@ STAGE PLANS:
         Map 1 
             Map Operator Tree:
                 TableScan
-                  alias: src2
+                  alias: src1
                   Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (key < 10) (type: boolean)
                     Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                     Spark HashTable Sink Operator
                       condition expressions:
-                        0 {key} {value}
-                        1 {value}
+                        0 {value}
+                        1 {key} {value}
                         2 {key} {value}
                       filter predicates:
                         0 
@@ -1816,18 +1816,18 @@ STAGE PLANS:
                         2 key (type: string)
             Local Work:
               Map Reduce Local Work
-        Map 4 
+        Map 2 
             Map Operator Tree:
                 TableScan
-                  alias: src1
+                  alias: src2
                   Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (key < 10) (type: boolean)
                     Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                     Spark HashTable Sink Operator
                       condition expressions:
-                        0 {value}
-                        1 {key} {value}
+                        0 {key} {value}
+                        1 {value}
                         2 {key} {value}
                       filter predicates:
                         0 
@@ -1843,10 +1843,10 @@ STAGE PLANS:
   Stage: Stage-1
     Spark
       Edges:
-        Reducer 3 <- Map 2 (SORT, 3)
+        Reducer 4 <- Map 3 (SORT, 3)
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 3 
             Map Operator Tree:
                 TableScan
                   alias: src3
@@ -1869,8 +1869,8 @@ STAGE PLANS:
                       2 key (type: string)
                     outputColumnNames: _col0, _col1, _col5, _col6, _col10, _col11
                     input vertices:
-                      0 Map 4
-                      1 Map 1
+                      0 Map 1
+                      1 Map 2
                     Statistics: Num rows: 1100 Data size: 11686 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string), _col6 (type: string), _col10 (type: string), _col11 (type: string)
@@ -1882,7 +1882,7 @@ STAGE PLANS:
                         Statistics: Num rows: 1100 Data size: 11686 Basic stats: COMPLETE Column stats: NONE
             Local Work:
               Map Reduce Local Work
-        Reducer 3 
+        Reducer 4 
             Reduce Operator Tree:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string), KEY.reducesinkkey4 (type: string), KEY.reducesinkkey5 (type: string)
@@ -2481,15 +2481,15 @@ STAGE PLANS:
         Map 1 
             Map Operator Tree:
                 TableScan
-                  alias: src2
+                  alias: src1
                   Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: ((key < 10) and (key < 15)) (type: boolean)
+                    predicate: ((key < 15) and (key < 10)) (type: boolean)
                     Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
                     Spark HashTable Sink Operator
                       condition expressions:
-                        0 {key} {value}
-                        1 {value}
+                        0 {value}
+                        1 {key} {value}
                         2 {key} {value}
                       filter predicates:
                         0 
@@ -2501,18 +2501,18 @@ STAGE PLANS:
                         2 key (type: string)
             Local Work:
               Map Reduce Local Work
-        Map 4 
+        Map 2 
             Map Operator Tree:
                 TableScan
-                  alias: src1
+                  alias: src2
                   Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: ((key < 15) and (key < 10)) (type: boolean)
+                    predicate: ((key < 10) and (key < 15)) (type: boolean)
                     Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
                     Spark HashTable Sink Operator
                       condition expressions:
-                        0 {value}
-                        1 {key} {value}
+                        0 {key} {value}
+                        1 {value}
                         2 {key} {value}
                       filter predicates:
                         0 
@@ -2528,10 +2528,10 @@ STAGE PLANS:
   Stage: Stage-1
     Spark
       Edges:
-        Reducer 3 <- Map 2 (SORT, 3)
+        Reducer 4 <- Map 3 (SORT, 3)
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 3 
             Map Operator Tree:
                 TableScan
                   alias: src3
@@ -2554,8 +2554,8 @@ STAGE PLANS:
                       2 key (type: string)
                     outputColumnNames: _col0, _col1, _col5, _col6, _col10, _col11
                     input vertices:
-                      0 Map 4
-                      1 Map 1
+                      0 Map 1
+                      1 Map 2
                     Statistics: Num rows: 1100 Data size: 11686 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string), _col6 (type: string), _col10 (type: string), _col11 (type: string)
@@ -2567,7 +2567,7 @@ STAGE PLANS:
                         Statistics: Num rows: 1100 Data size: 11686 Basic stats: COMPLETE Column stats: NONE
             Local Work:
               Map Reduce Local Work
-        Reducer 3 
+        Reducer 4 
             Reduce Operator Tree:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string), KEY.reducesinkkey4 (type: string), KEY.reducesinkkey5 (type: string)
@@ -3163,7 +3163,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: src
@@ -3189,7 +3189,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
                   alias: x
@@ -3205,7 +3205,7 @@ STAGE PLANS:
                       1 _col0 (type: string)
                     outputColumnNames: _col0, _col1, _col5, _col6
                     input vertices:
-                      1 Map 2
+                      1 Map 1
                     Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string), _col6 (type: string)
@@ -3819,7 +3819,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 3 
             Map Operator Tree:
                 TableScan
                   alias: b
@@ -3840,10 +3840,10 @@ STAGE PLANS:
   Stage: Stage-1
     Spark
       Edges:
-        Reducer 3 <- Map 2 (GROUP, 1)
+        Reducer 2 <- Map 1 (GROUP, 1)
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: a
@@ -3861,22 +3861,20 @@ STAGE PLANS:
                         0 key (type: string)
                         1 key (type: string)
                       input vertices:
-                        1 Map 1
+                        1 Map 3
                       Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-                      Select Operator
-                        Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-                        Group By Operator
-                          aggregations: count(1)
-                          mode: hash
-                          outputColumnNames: _col0
+                      Group By Operator
+                        aggregations: count(1)
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          sort order: 
                           Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                          Reduce Output Operator
-                            sort order: 
-                            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                            value expressions: _col0 (type: bigint)
+                          value expressions: _col0 (type: bigint)
             Local Work:
               Map Reduce Local Work
-        Reducer 3 
+        Reducer 2 
             Reduce Operator Tree:
               Group By Operator
                 aggregations: count(VALUE._col0)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join41.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join41.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join41.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join41.q.out Sat Nov 29 03:44:22 2014
@@ -24,7 +24,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
                   alias: src2
@@ -46,7 +46,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: src1
@@ -62,7 +62,7 @@ STAGE PLANS:
                       1 key (type: string)
                     outputColumnNames: _col0, _col1, _col5, _col6
                     input vertices:
-                      1 Map 1
+                      1 Map 2
                     Statistics: Num rows: 3 Data size: 23 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string), _col6 (type: string)
@@ -113,7 +113,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
                   alias: src2
@@ -135,7 +135,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: src1
@@ -151,7 +151,7 @@ STAGE PLANS:
                       1 key (type: string)
                     outputColumnNames: _col0, _col1, _col5, _col6
                     input vertices:
-                      1 Map 1
+                      1 Map 2
                     Statistics: Num rows: 3 Data size: 23 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string), _col6 (type: string)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join9.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join9.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/join9.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/join9.q.out Sat Nov 29 03:44:22 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join_alt_syntax.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join_alt_syntax.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join_alt_syntax.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join_alt_syntax.q.out Sat Nov 29 03:44:22 2014
@@ -14,7 +14,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
                   alias: p2
@@ -33,7 +33,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: p1
@@ -49,7 +49,7 @@ STAGE PLANS:
                       1 
                     outputColumnNames: _col1, _col13
                     input vertices:
-                      1 Map 1
+                      1 Map 2
                     Statistics: Num rows: 28 Data size: 3461 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col1 (type: string), _col13 (type: string)
@@ -89,10 +89,10 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
-                  alias: p3
+                  alias: p2
                   Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: p_name is not null (type: boolean)
@@ -100,18 +100,18 @@ STAGE PLANS:
                     Spark HashTable Sink Operator
                       condition expressions:
                         0 {p_name}
-                        1 {p_name}
-                        2 
+                        1 
+                        2 {p_name}
                       keys:
                         0 p_name (type: string)
                         1 p_name (type: string)
                         2 p_name (type: string)
             Local Work:
               Map Reduce Local Work
-        Map 2 
+        Map 3 
             Map Operator Tree:
                 TableScan
-                  alias: p2
+                  alias: p3
                   Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: p_name is not null (type: boolean)
@@ -119,8 +119,8 @@ STAGE PLANS:
                     Spark HashTable Sink Operator
                       condition expressions:
                         0 {p_name}
-                        1 
-                        2 {p_name}
+                        1 {p_name}
+                        2 
                       keys:
                         0 p_name (type: string)
                         1 p_name (type: string)
@@ -132,7 +132,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 3 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: p1
@@ -155,7 +155,7 @@ STAGE PLANS:
                       outputColumnNames: _col1, _col13, _col25
                       input vertices:
                         1 Map 2
-                        2 Map 1
+                        2 Map 3
                       Statistics: Num rows: 28 Data size: 3460 Basic stats: COMPLETE Column stats: NONE
                       Filter Operator
                         predicate: ((_col1 = _col13) and (_col13 = _col25)) (type: boolean)
@@ -221,7 +221,7 @@ STAGE PLANS:
                           2 p_name (type: string)
             Local Work:
               Map Reduce Local Work
-        Map 2 
+        Map 3 
             Map Operator Tree:
                 TableScan
                   alias: p3
@@ -245,7 +245,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 3 
+        Map 2 
             Map Operator Tree:
                 TableScan
                   alias: p1
@@ -268,7 +268,7 @@ STAGE PLANS:
                       outputColumnNames: _col1, _col12, _col14
                       input vertices:
                         1 Map 1
-                        2 Map 2
+                        2 Map 3
                       Statistics: Num rows: 28 Data size: 3460 Basic stats: COMPLETE Column stats: NONE
                       Filter Operator
                         predicate: ((_col1 = _col12) and (_col12 = _col14)) (type: boolean)
@@ -334,7 +334,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 3 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: p1
@@ -369,7 +369,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 3 
             Map Operator Tree:
                 TableScan
                   alias: p3
@@ -388,7 +388,7 @@ STAGE PLANS:
                         1 p_name (type: string)
                       outputColumnNames: _col0, _col1, _col12, _col13, _col25
                       input vertices:
-                        0 Map 3
+                        0 Map 1
                       Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
                       Filter Operator
                         predicate: (((_col12 + _col0) = _col0) and (_col25 = _col13)) (type: boolean)
@@ -434,7 +434,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 3 
+        Map 2 
             Map Operator Tree:
                 TableScan
                   alias: p2
@@ -459,23 +459,6 @@ STAGE PLANS:
         Map 1 
             Map Operator Tree:
                 TableScan
-                  alias: p4
-                  Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: p_partkey is not null (type: boolean)
-                    Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-                    Spark HashTable Sink Operator
-                      condition expressions:
-                        0 {_col0} {_col1} {_col12} {_col13} {_col25}
-                        1 {p_name}
-                      keys:
-                        0 _col0 (type: int)
-                        1 p_partkey (type: int)
-            Local Work:
-              Map Reduce Local Work
-        Map 4 
-            Map Operator Tree:
-                TableScan
                   alias: p1
                   Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
@@ -492,7 +475,7 @@ STAGE PLANS:
                         1 p_name (type: string), p_partkey (type: int)
                       outputColumnNames: _col0, _col1, _col12, _col13
                       input vertices:
-                        1 Map 3
+                        1 Map 2
                       Statistics: Num rows: 7 Data size: 931 Basic stats: COMPLETE Column stats: NONE
                       Spark HashTable Sink Operator
                         condition expressions:
@@ -503,12 +486,29 @@ STAGE PLANS:
                           1 p_name (type: string)
             Local Work:
               Map Reduce Local Work
+        Map 4 
+            Map Operator Tree:
+                TableScan
+                  alias: p4
+                  Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: p_partkey is not null (type: boolean)
+                    Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
+                    Spark HashTable Sink Operator
+                      condition expressions:
+                        0 {_col0} {_col1} {_col12} {_col13} {_col25}
+                        1 {p_name}
+                      keys:
+                        0 _col0 (type: int)
+                        1 p_partkey (type: int)
+            Local Work:
+              Map Reduce Local Work
 
   Stage: Stage-1
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 3 
             Map Operator Tree:
                 TableScan
                   alias: p3
@@ -527,7 +527,7 @@ STAGE PLANS:
                         1 p_name (type: string)
                       outputColumnNames: _col0, _col1, _col12, _col13, _col25
                       input vertices:
-                        0 Map 4
+                        0 Map 1
                       Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
                       Map Join Operator
                         condition map:
@@ -540,7 +540,7 @@ STAGE PLANS:
                           1 p_partkey (type: int)
                         outputColumnNames: _col0, _col1, _col12, _col13, _col25, _col36, _col37
                         input vertices:
-                          1 Map 1
+                          1 Map 4
                         Statistics: Num rows: 15 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
                         Filter Operator
                           predicate: (((_col13 = _col25) and (_col0 = _col36)) and (_col0 = _col12)) (type: boolean)
@@ -586,7 +586,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 3 
+        Map 2 
             Map Operator Tree:
                 TableScan
                   alias: p2
@@ -611,23 +611,6 @@ STAGE PLANS:
         Map 1 
             Map Operator Tree:
                 TableScan
-                  alias: p4
-                  Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: p_partkey is not null (type: boolean)
-                    Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-                    Spark HashTable Sink Operator
-                      condition expressions:
-                        0 {_col0} {_col1} {_col12} {_col13} {_col25}
-                        1 {p_name}
-                      keys:
-                        0 _col0 (type: int)
-                        1 p_partkey (type: int)
-            Local Work:
-              Map Reduce Local Work
-        Map 4 
-            Map Operator Tree:
-                TableScan
                   alias: p1
                   Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
@@ -644,7 +627,7 @@ STAGE PLANS:
                         1 p_name (type: string), p_partkey (type: int)
                       outputColumnNames: _col0, _col1, _col12, _col13
                       input vertices:
-                        1 Map 3
+                        1 Map 2
                       Statistics: Num rows: 7 Data size: 931 Basic stats: COMPLETE Column stats: NONE
                       Spark HashTable Sink Operator
                         condition expressions:
@@ -655,12 +638,29 @@ STAGE PLANS:
                           1 p_name (type: string)
             Local Work:
               Map Reduce Local Work
+        Map 4 
+            Map Operator Tree:
+                TableScan
+                  alias: p4
+                  Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: p_partkey is not null (type: boolean)
+                    Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
+                    Spark HashTable Sink Operator
+                      condition expressions:
+                        0 {_col0} {_col1} {_col12} {_col13} {_col25}
+                        1 {p_name}
+                      keys:
+                        0 _col0 (type: int)
+                        1 p_partkey (type: int)
+            Local Work:
+              Map Reduce Local Work
 
   Stage: Stage-1
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 3 
             Map Operator Tree:
                 TableScan
                   alias: p3
@@ -679,7 +679,7 @@ STAGE PLANS:
                         1 p_name (type: string)
                       outputColumnNames: _col0, _col1, _col12, _col13, _col25
                       input vertices:
-                        0 Map 4
+                        0 Map 1
                       Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
                       Map Join Operator
                         condition map:
@@ -692,7 +692,7 @@ STAGE PLANS:
                           1 p_partkey (type: int)
                         outputColumnNames: _col0, _col1, _col12, _col13, _col25, _col36, _col37
                         input vertices:
-                          1 Map 1
+                          1 Map 4
                         Statistics: Num rows: 15 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
                         Filter Operator
                           predicate: (((_col13 = _col25) and (_col0 = _col36)) and (_col0 = _col12)) (type: boolean)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join_cond_pushdown_1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join_cond_pushdown_1.q.out?rev=1642395&r1=1642394&r2=1642395&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join_cond_pushdown_1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join_cond_pushdown_1.q.out Sat Nov 29 03:44:22 2014
@@ -14,10 +14,10 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
-                  alias: p3
+                  alias: p2
                   Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: p_name is not null (type: boolean)
@@ -25,18 +25,18 @@ STAGE PLANS:
                     Spark HashTable Sink Operator
                       condition expressions:
                         0 {p_partkey} {p_name} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
-                        1 {p_partkey} {p_name} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
-                        2 {p_partkey} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
+                        1 {p_partkey} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
+                        2 {p_partkey} {p_name} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
                       keys:
                         0 p_name (type: string)
                         1 p_name (type: string)
                         2 p_name (type: string)
             Local Work:
               Map Reduce Local Work
-        Map 2 
+        Map 3 
             Map Operator Tree:
                 TableScan
-                  alias: p2
+                  alias: p3
                   Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: p_name is not null (type: boolean)
@@ -44,8 +44,8 @@ STAGE PLANS:
                     Spark HashTable Sink Operator
                       condition expressions:
                         0 {p_partkey} {p_name} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
-                        1 {p_partkey} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
-                        2 {p_partkey} {p_name} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
+                        1 {p_partkey} {p_name} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
+                        2 {p_partkey} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
                       keys:
                         0 p_name (type: string)
                         1 p_name (type: string)
@@ -57,7 +57,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 3 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: p1
@@ -80,7 +80,7 @@ STAGE PLANS:
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32
                       input vertices:
                         1 Map 2
-                        2 Map 1
+                        2 Map 3
                       Statistics: Num rows: 28 Data size: 3460 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col12 (type: int), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: string), _col17 (type: int), _col18 (type: string), _col19 (type: double), _col20 (type: string), _col24 (type: int), _col25 (type: string), _col26 (type: string), _col27 (type: string), _col28 (type: string), _col29 (type: int), _col30 (type: string), _col31 (type: double), _col32 (type: string)
@@ -118,10 +118,10 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
-                  alias: p3
+                  alias: p2
                   Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: p_name is not null (type: boolean)
@@ -129,18 +129,18 @@ STAGE PLANS:
                     Spark HashTable Sink Operator
                       condition expressions:
                         0 {p_partkey} {p_name} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
-                        1 {p_partkey} {p_name} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
-                        2 {p_partkey} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
+                        1 {p_partkey} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
+                        2 {p_partkey} {p_name} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
                       keys:
                         0 p_name (type: string)
                         1 p_name (type: string)
                         2 p_name (type: string)
             Local Work:
               Map Reduce Local Work
-        Map 2 
+        Map 3 
             Map Operator Tree:
                 TableScan
-                  alias: p2
+                  alias: p3
                   Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: p_name is not null (type: boolean)
@@ -148,8 +148,8 @@ STAGE PLANS:
                     Spark HashTable Sink Operator
                       condition expressions:
                         0 {p_partkey} {p_name} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
-                        1 {p_partkey} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
-                        2 {p_partkey} {p_name} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
+                        1 {p_partkey} {p_name} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
+                        2 {p_partkey} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
                       keys:
                         0 p_name (type: string)
                         1 p_name (type: string)
@@ -161,7 +161,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 3 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: p1
@@ -184,7 +184,7 @@ STAGE PLANS:
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32
                       input vertices:
                         1 Map 2
-                        2 Map 1
+                        2 Map 3
                       Statistics: Num rows: 28 Data size: 3460 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col12 (type: int), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: string), _col17 (type: int), _col18 (type: string), _col19 (type: double), _col20 (type: string), _col24 (type: int), _col25 (type: string), _col26 (type: string), _col27 (type: string), _col28 (type: string), _col29 (type: int), _col30 (type: string), _col31 (type: double), _col32 (type: string)
@@ -245,7 +245,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 3 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: p1
@@ -280,7 +280,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 3 
             Map Operator Tree:
                 TableScan
                   alias: p3
@@ -299,7 +299,7 @@ STAGE PLANS:
                         1 p_name (type: string)
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32
                       input vertices:
-                        0 Map 3
+                        0 Map 1
                       Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col12 (type: int), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: string), _col17 (type: int), _col18 (type: string), _col19 (type: double), _col20 (type: string), _col24 (type: int), _col25 (type: string), _col26 (type: string), _col27 (type: string), _col28 (type: string), _col29 (type: int), _col30 (type: string), _col31 (type: double), _col32 (type: string)
@@ -337,38 +337,38 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
-                  alias: p3
+                  alias: p2
                   Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: p_name is not null (type: boolean)
-                    Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
+                    predicate: ((p_partkey = 1) and p_name is not null) (type: boolean)
+                    Statistics: Num rows: 7 Data size: 847 Basic stats: COMPLETE Column stats: NONE
                     Spark HashTable Sink Operator
                       condition expressions:
-                        0 {_col0} {_col1} {_col2} {_col3} {_col4} {_col5} {_col6} {_col7} {_col8} {_col13} {_col14} {_col15} {_col16} {_col17} {_col18} {_col19} {_col20}
-                        1 {p_partkey} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
+                        0 {p_partkey} {p_name} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
+                        1 {p_name} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
                       keys:
-                        0 _col13 (type: string)
-                        1 p_name (type: string)
+                        0 
+                        1 
             Local Work:
               Map Reduce Local Work
-        Map 2 
+        Map 3 
             Map Operator Tree:
                 TableScan
-                  alias: p2
+                  alias: p3
                   Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: ((p_partkey = 1) and p_name is not null) (type: boolean)
-                    Statistics: Num rows: 7 Data size: 847 Basic stats: COMPLETE Column stats: NONE
+                    predicate: p_name is not null (type: boolean)
+                    Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
                     Spark HashTable Sink Operator
                       condition expressions:
-                        0 {p_partkey} {p_name} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
-                        1 {p_name} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
+                        0 {_col0} {_col1} {_col2} {_col3} {_col4} {_col5} {_col6} {_col7} {_col8} {_col13} {_col14} {_col15} {_col16} {_col17} {_col18} {_col19} {_col20}
+                        1 {p_partkey} {p_mfgr} {p_brand} {p_type} {p_size} {p_container} {p_retailprice} {p_comment}
                       keys:
-                        0 
-                        1 
+                        0 _col13 (type: string)
+                        1 p_name (type: string)
             Local Work:
               Map Reduce Local Work
 
@@ -376,7 +376,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 3 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: p1
@@ -405,7 +405,7 @@ STAGE PLANS:
                         1 p_name (type: string)
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32
                       input vertices:
-                        1 Map 1
+                        1 Map 3
                       Statistics: Num rows: 30 Data size: 3807 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), 1 (type: int), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: string), _col17 (type: int), _col18 (type: string), _col19 (type: double), _col20 (type: string), _col24 (type: int), _col25 (type: string), _col26 (type: string), _col27 (type: string), _col28 (type: string), _col29 (type: int), _col30 (type: string), _col31 (type: double), _col32 (type: string)