Posted to commits@hive.apache.org by br...@apache.org on 2014/12/29 03:14:00 UTC

svn commit: r1648280 [25/25] - in /hive/branches/spark: data/conf/spark/ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/ ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/ ql/src/test/results/clientpositive/spark/

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/vector_mapjoin_reduce.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/vector_mapjoin_reduce.q.out?rev=1648280&r1=1648279&r2=1648280&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/vector_mapjoin_reduce.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/vector_mapjoin_reduce.q.out Mon Dec 29 02:13:57 2014
@@ -41,9 +41,6 @@ STAGE PLANS:
                     predicate: ((l_partkey is not null and l_orderkey is not null) and (l_linenumber = 1)) (type: boolean)
                     Statistics: Num rows: 12 Data size: 1439 Basic stats: COMPLETE Column stats: NONE
                     Spark HashTable Sink Operator
-                      condition expressions:
-                        0 {_col0}
-                        1 {l_orderkey} {l_suppkey}
                       keys:
                         0 _col0 (type: int)
                         1 l_partkey (type: int)
@@ -67,9 +64,6 @@ STAGE PLANS:
                         outputColumnNames: _col0
                         Statistics: Num rows: 25 Data size: 2999 Basic stats: COMPLETE Column stats: NONE
                         Spark HashTable Sink Operator
-                          condition expressions:
-                            0 {_col0} {_col3}
-                            1 
                           keys:
                             0 _col1 (type: int)
                             1 _col0 (type: int)
@@ -108,40 +102,34 @@ STAGE PLANS:
                 keys: KEY._col0 (type: int)
                 mode: mergepartial
                 outputColumnNames: _col0
-                Statistics: Num rows: 50 Data size: 5999 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 25 Data size: 2999 Basic stats: COMPLETE Column stats: NONE
                 Map Join Operator
                   condition map:
                        Inner Join 0 to 1
-                  condition expressions:
-                    0 {_col0}
-                    1 {l_orderkey} {l_suppkey}
                   keys:
                     0 _col0 (type: int)
                     1 l_partkey (type: int)
                   outputColumnNames: _col0, _col1, _col3
                   input vertices:
                     1 Map 3
-                  Statistics: Num rows: 55 Data size: 6598 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 27 Data size: 3298 Basic stats: COMPLETE Column stats: NONE
                   Map Join Operator
                     condition map:
                          Left Semi Join 0 to 1
-                    condition expressions:
-                      0 {_col0} {_col3}
-                      1 
                     keys:
                       0 _col1 (type: int)
                       1 _col0 (type: int)
                     outputColumnNames: _col0, _col3
                     input vertices:
                       1 Map 4
-                    Statistics: Num rows: 60 Data size: 7257 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 29 Data size: 3627 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col0 (type: int), _col3 (type: int)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 60 Data size: 7257 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 29 Data size: 3627 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
-                        Statistics: Num rows: 60 Data size: 7257 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 29 Data size: 3627 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -211,9 +199,6 @@ STAGE PLANS:
                     predicate: (((l_partkey is not null and l_orderkey is not null) and l_linenumber is not null) and (l_linenumber = 1)) (type: boolean)
                     Statistics: Num rows: 6 Data size: 719 Basic stats: COMPLETE Column stats: NONE
                     Spark HashTable Sink Operator
-                      condition expressions:
-                        0 {_col0}
-                        1 {l_orderkey} {l_suppkey}
                       keys:
                         0 _col0 (type: int)
                         1 l_partkey (type: int)
@@ -237,9 +222,6 @@ STAGE PLANS:
                         outputColumnNames: _col0, _col1
                         Statistics: Num rows: 6 Data size: 719 Basic stats: COMPLETE Column stats: NONE
                         Spark HashTable Sink Operator
-                          condition expressions:
-                            0 {_col0} {_col3}
-                            1 
                           keys:
                             0 _col1 (type: int), 1 (type: int)
                             1 _col0 (type: int), _col1 (type: int)
@@ -278,40 +260,34 @@ STAGE PLANS:
                 keys: KEY._col0 (type: int)
                 mode: mergepartial
                 outputColumnNames: _col0
-                Statistics: Num rows: 50 Data size: 5999 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 25 Data size: 2999 Basic stats: COMPLETE Column stats: NONE
                 Map Join Operator
                   condition map:
                        Inner Join 0 to 1
-                  condition expressions:
-                    0 {_col0}
-                    1 {l_orderkey} {l_suppkey}
                   keys:
                     0 _col0 (type: int)
                     1 l_partkey (type: int)
                   outputColumnNames: _col0, _col1, _col3
                   input vertices:
                     1 Map 3
-                  Statistics: Num rows: 55 Data size: 6598 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 27 Data size: 3298 Basic stats: COMPLETE Column stats: NONE
                   Map Join Operator
                     condition map:
                          Left Semi Join 0 to 1
-                    condition expressions:
-                      0 {_col0} {_col3}
-                      1 
                     keys:
                       0 _col1 (type: int), 1 (type: int)
                       1 _col0 (type: int), _col1 (type: int)
                     outputColumnNames: _col0, _col3
                     input vertices:
                       1 Map 4
-                    Statistics: Num rows: 60 Data size: 7257 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 29 Data size: 3627 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col0 (type: int), _col3 (type: int)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 60 Data size: 7257 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 29 Data size: 3627 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
-                        Statistics: Num rows: 60 Data size: 7257 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 29 Data size: 3627 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/vector_orderby_5.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/vector_orderby_5.q.out?rev=1648280&r1=1648279&r2=1648280&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/vector_orderby_5.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/vector_orderby_5.q.out Mon Dec 29 02:13:57 2014
@@ -148,11 +148,11 @@ STAGE PLANS:
                 keys: KEY._col0 (type: boolean)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 2000 Data size: 918712 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1000 Data size: 459356 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: boolean)
                   sort order: -
-                  Statistics: Num rows: 2000 Data size: 918712 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1000 Data size: 459356 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col1 (type: bigint)
             Execution mode: vectorized
         Reducer 3 
@@ -160,10 +160,10 @@ STAGE PLANS:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: boolean), VALUE._col0 (type: bigint)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 2000 Data size: 918712 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1000 Data size: 459356 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 2000 Data size: 918712 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1000 Data size: 459356 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/vector_string_concat.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/vector_string_concat.q.out?rev=1648280&r1=1648279&r2=1648280&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/vector_string_concat.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/vector_string_concat.q.out Mon Dec 29 02:13:57 2014
@@ -161,3 +161,246 @@ sarah garcia	      sarah garcia      	|
 zach young	      zach young      	|      zach young|
 david underhill	      david underhill      	|      david underhill|
 yuri carson	      yuri carson      	|      yuri carson|
+PREHOOK: query: ------------------------------------------------------------------------------------------
+
+create table vectortab2k(
+            t tinyint,
+            si smallint,
+            i int,
+            b bigint,
+            f float,
+            d double,
+            dc decimal(38,18),
+            bo boolean,
+            s string,
+            s2 string,
+            ts timestamp,
+            ts2 timestamp,
+            dt date)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@vectortab2k
+POSTHOOK: query: ------------------------------------------------------------------------------------------
+
+create table vectortab2k(
+            t tinyint,
+            si smallint,
+            i int,
+            b bigint,
+            f float,
+            d double,
+            dc decimal(38,18),
+            bo boolean,
+            s string,
+            s2 string,
+            ts timestamp,
+            ts2 timestamp,
+            dt date)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@vectortab2k
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/vectortab2k' OVERWRITE INTO TABLE vectortab2k
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@vectortab2k
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/vectortab2k' OVERWRITE INTO TABLE vectortab2k
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@vectortab2k
+PREHOOK: query: create table vectortab2korc(
+            t tinyint,
+            si smallint,
+            i int,
+            b bigint,
+            f float,
+            d double,
+            dc decimal(38,18),
+            bo boolean,
+            s string,
+            s2 string,
+            ts timestamp,
+            ts2 timestamp,
+            dt date)
+STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@vectortab2korc
+POSTHOOK: query: create table vectortab2korc(
+            t tinyint,
+            si smallint,
+            i int,
+            b bigint,
+            f float,
+            d double,
+            dc decimal(38,18),
+            bo boolean,
+            s string,
+            s2 string,
+            ts timestamp,
+            ts2 timestamp,
+            dt date)
+STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@vectortab2korc
+PREHOOK: query: INSERT INTO TABLE vectortab2korc SELECT * FROM vectortab2k
+PREHOOK: type: QUERY
+PREHOOK: Input: default@vectortab2k
+PREHOOK: Output: default@vectortab2korc
+POSTHOOK: query: INSERT INTO TABLE vectortab2korc SELECT * FROM vectortab2k
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@vectortab2k
+POSTHOOK: Output: default@vectortab2korc
+POSTHOOK: Lineage: vectortab2korc.b SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:b, type:bigint, comment:null), ]
+POSTHOOK: Lineage: vectortab2korc.bo SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:bo, type:boolean, comment:null), ]
+POSTHOOK: Lineage: vectortab2korc.d SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:d, type:double, comment:null), ]
+POSTHOOK: Lineage: vectortab2korc.dc SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:dc, type:decimal(38,18), comment:null), ]
+POSTHOOK: Lineage: vectortab2korc.dt SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:dt, type:date, comment:null), ]
+POSTHOOK: Lineage: vectortab2korc.f SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:f, type:float, comment:null), ]
+POSTHOOK: Lineage: vectortab2korc.i SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:i, type:int, comment:null), ]
+POSTHOOK: Lineage: vectortab2korc.s SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:s, type:string, comment:null), ]
+POSTHOOK: Lineage: vectortab2korc.s2 SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:s2, type:string, comment:null), ]
+POSTHOOK: Lineage: vectortab2korc.si SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:si, type:smallint, comment:null), ]
+POSTHOOK: Lineage: vectortab2korc.t SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:t, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: vectortab2korc.ts SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:ts, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: vectortab2korc.ts2 SIMPLE [(vectortab2k)vectortab2k.FieldSchema(name:ts2, type:timestamp, comment:null), ]
+PREHOOK: query: EXPLAIN
+SELECT CONCAT(CONCAT(CONCAT('Quarter ',CAST(CAST((MONTH(dt) - 1) / 3 + 1 AS INT) AS STRING)),'-'),CAST(YEAR(dt) AS STRING)) AS `field`
+    FROM vectortab2korc 
+    GROUP BY CONCAT(CONCAT(CONCAT('Quarter ',CAST(CAST((MONTH(dt) - 1) / 3 + 1 AS INT) AS STRING)),'-'),CAST(YEAR(dt) AS STRING))
+    LIMIT 50
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT CONCAT(CONCAT(CONCAT('Quarter ',CAST(CAST((MONTH(dt) - 1) / 3 + 1 AS INT) AS STRING)),'-'),CAST(YEAR(dt) AS STRING)) AS `field`
+    FROM vectortab2korc 
+    GROUP BY CONCAT(CONCAT(CONCAT('Quarter ',CAST(CAST((MONTH(dt) - 1) / 3 + 1 AS INT) AS STRING)),'-'),CAST(YEAR(dt) AS STRING))
+    LIMIT 50
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP, 3)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: vectortab2korc
+                  Statistics: Num rows: 2000 Data size: 918712 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: dt (type: date)
+                    outputColumnNames: dt
+                    Statistics: Num rows: 2000 Data size: 918712 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      keys: concat(concat(concat('Quarter ', UDFToString(UDFToInteger((((month(dt) - 1) / 3) + 1)))), '-'), UDFToString(year(dt))) (type: string)
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 2000 Data size: 918712 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 2000 Data size: 918712 Basic stats: COMPLETE Column stats: NONE
+            Execution mode: vectorized
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1000 Data size: 459356 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: string)
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 1000 Data size: 459356 Basic stats: COMPLETE Column stats: NONE
+                  Limit
+                    Number of rows: 50
+                    Statistics: Num rows: 50 Data size: 22950 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 50 Data size: 22950 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 50
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT CONCAT(CONCAT(CONCAT('Quarter ',CAST(CAST((MONTH(dt) - 1) / 3 + 1 AS INT) AS STRING)),'-'),CAST(YEAR(dt) AS STRING)) AS `field`
+    FROM vectortab2korc 
+    GROUP BY CONCAT(CONCAT(CONCAT('Quarter ',CAST(CAST((MONTH(dt) - 1) / 3 + 1 AS INT) AS STRING)),'-'),CAST(YEAR(dt) AS STRING))
+    LIMIT 50
+PREHOOK: type: QUERY
+PREHOOK: Input: default@vectortab2korc
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CONCAT(CONCAT(CONCAT('Quarter ',CAST(CAST((MONTH(dt) - 1) / 3 + 1 AS INT) AS STRING)),'-'),CAST(YEAR(dt) AS STRING)) AS `field`
+    FROM vectortab2korc 
+    GROUP BY CONCAT(CONCAT(CONCAT('Quarter ',CAST(CAST((MONTH(dt) - 1) / 3 + 1 AS INT) AS STRING)),'-'),CAST(YEAR(dt) AS STRING))
+    LIMIT 50
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@vectortab2korc
+#### A masked pattern was here ####
+Quarter 3-2051
+Quarter 3-2045
+Quarter 4-2095
+Quarter 1-2020
+Quarter 3-2042
+Quarter 2-1971
+Quarter 1-2044
+Quarter 4-1987
+Quarter 1-2047
+Quarter 1-2101
+Quarter 4-2080
+Quarter 3-2087
+Quarter 4-2047
+Quarter 2-2061
+Quarter 2-2103
+Quarter 1-1978
+Quarter 4-1984
+Quarter 3-1982
+Quarter 2-2001
+Quarter 3-2024
+Quarter 2-2076
+Quarter 1-2074
+Quarter 3-2105
+Quarter 4-1999
+Quarter 3-2072
+Quarter 3-2000
+Quarter 2-2040
+Quarter 4-1972
+Quarter 4-2041
+Quarter 1-2026
+Quarter 3-2015
+Quarter 1-2041
+Quarter 3-1979
+Quarter 3-1970
+Quarter 3-2066
+Quarter 1-2077
+Quarter 4-2059
+Quarter 1-2086
+Quarter 3-2030
+Quarter 2-2067
+Quarter 4-2065
+Quarter 3-2021
+Quarter 3-2012
+Quarter 2-2037
+Quarter 1-1987
+Quarter 4-2014
+Quarter 4-2038
+Quarter 4-1975
+Quarter 1-2053
+Quarter 4-2068

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_bucketmapjoin1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_bucketmapjoin1.q.out?rev=1648280&r1=1648279&r2=1648280&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_bucketmapjoin1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_bucketmapjoin1.q.out Mon Dec 29 02:13:57 2014
@@ -107,61 +107,36 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-1
     Spark
-      Edges:
-        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 3), Map 3 (PARTITION-LEVEL SORT, 3)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
             Map Operator Tree:
                 TableScan
-                  alias: a
-                  Statistics: Num rows: 2 Data size: 208 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 1 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: key (type: int)
-                      sort order: +
-                      Map-reduce partition columns: key (type: int)
-                      Statistics: Num rows: 1 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                      value expressions: value (type: string)
-            Execution mode: vectorized
-        Map 3 
-            Map Operator Tree:
-                TableScan
                   alias: b
                   Statistics: Num rows: 2 Data size: 208 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 1 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: key (type: int)
-                      sort order: +
-                      Map-reduce partition columns: key (type: int)
-                      Statistics: Num rows: 1 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                      value expressions: value (type: string)
+                    Sorted Merge Bucket Map Join Operator
+                      condition map:
+                           Inner Join 0 to 1
+                      keys:
+                        0 key (type: int)
+                        1 key (type: int)
+                      outputColumnNames: _col0, _col1, _col5, _col6
+                      Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
+                      Select Operator
+                        expressions: _col0 (type: int), _col1 (type: string), _col5 (type: int), _col6 (type: string)
+                        outputColumnNames: _col0, _col1, _col2, _col3
+                        Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             Execution mode: vectorized
-        Reducer 2 
-            Reduce Operator Tree:
-              Join Operator
-                condition map:
-                     Inner Join 0 to 1
-                condition expressions:
-                  0 {KEY.reducesinkkey0} {VALUE._col0}
-                  1 {KEY.reducesinkkey0} {VALUE._col0}
-                outputColumnNames: _col0, _col1, _col5, _col6
-                Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col0 (type: int), _col1 (type: string), _col5 (type: int), _col6 (type: string)
-                  outputColumnNames: _col0, _col1, _col2, _col3
-                  Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
     Fetch Operator
@@ -196,8 +171,6 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-1
     Spark
-      Edges:
-        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 3), Map 3 (PARTITION-LEVEL SORT, 3)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -208,48 +181,26 @@ STAGE PLANS:
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 1 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: key (type: int)
-                      sort order: +
-                      Map-reduce partition columns: key (type: int)
-                      Statistics: Num rows: 1 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                      value expressions: value (type: string)
+                    Sorted Merge Bucket Map Join Operator
+                      condition map:
+                           Inner Join 0 to 1
+                      keys:
+                        0 key (type: int)
+                        1 key (type: int)
+                      outputColumnNames: _col0, _col1, _col5, _col6
+                      Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
+                      Select Operator
+                        expressions: _col0 (type: int), _col1 (type: string), _col5 (type: int), _col6 (type: string)
+                        outputColumnNames: _col0, _col1, _col2, _col3
+                        Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             Execution mode: vectorized
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: b
-                  Statistics: Num rows: 2 Data size: 50 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 1 Data size: 25 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: key (type: int)
-                      sort order: +
-                      Map-reduce partition columns: key (type: int)
-                      Statistics: Num rows: 1 Data size: 25 Basic stats: COMPLETE Column stats: NONE
-                      value expressions: value (type: string)
-        Reducer 2 
-            Reduce Operator Tree:
-              Join Operator
-                condition map:
-                     Inner Join 0 to 1
-                condition expressions:
-                  0 {KEY.reducesinkkey0} {VALUE._col0}
-                  1 {KEY.reducesinkkey0} {VALUE._col0}
-                outputColumnNames: _col0, _col1, _col5, _col6
-                Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col0 (type: int), _col1 (type: string), _col5 (type: int), _col6 (type: string)
-                  outputColumnNames: _col0, _col1, _col2, _col3
-                  Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
     Fetch Operator
@@ -294,8 +245,6 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-1
     Spark
-      Edges:
-        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 3), Map 3 (PARTITION-LEVEL SORT, 3)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -306,48 +255,26 @@ STAGE PLANS:
                   Filter Operator
                     predicate: key is not null (type: boolean)
                     Statistics: Num rows: 1 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: key (type: int)
-                      sort order: +
-                      Map-reduce partition columns: key (type: int)
-                      Statistics: Num rows: 1 Data size: 104 Basic stats: COMPLETE Column stats: NONE
-                      value expressions: value (type: string)
+                    Sorted Merge Bucket Map Join Operator
+                      condition map:
+                           Inner Join 0 to 1
+                      keys:
+                        0 key (type: int)
+                        1 key (type: int)
+                      outputColumnNames: _col0, _col1, _col5, _col6
+                      Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
+                      Select Operator
+                        expressions: _col0 (type: int), _col1 (type: string), _col5 (type: int), _col6 (type: string)
+                        outputColumnNames: _col0, _col1, _col2, _col3
+                        Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             Execution mode: vectorized
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: b
-                  Statistics: Num rows: 2 Data size: 52 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 1 Data size: 26 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: key (type: int)
-                      sort order: +
-                      Map-reduce partition columns: key (type: int)
-                      Statistics: Num rows: 1 Data size: 26 Basic stats: COMPLETE Column stats: NONE
-                      value expressions: value (type: string)
-        Reducer 2 
-            Reduce Operator Tree:
-              Join Operator
-                condition map:
-                     Inner Join 0 to 1
-                condition expressions:
-                  0 {KEY.reducesinkkey0} {VALUE._col0}
-                  1 {KEY.reducesinkkey0} {VALUE._col0}
-                outputColumnNames: _col0, _col1, _col5, _col6
-                Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col0 (type: int), _col1 (type: string), _col5 (type: int), _col6 (type: string)
-                  outputColumnNames: _col0, _col1, _col2, _col3
-                  Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
     Fetch Operator

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_mapjoin.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_mapjoin.q.out?rev=1648280&r1=1648279&r2=1648280&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_mapjoin.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_mapjoin.q.out Mon Dec 29 02:13:57 2014
@@ -25,9 +25,6 @@ STAGE PLANS:
                     predicate: cint is not null (type: boolean)
                     Statistics: Num rows: 6144 Data size: 188618 Basic stats: COMPLETE Column stats: NONE
                     Spark HashTable Sink Operator
-                      condition expressions:
-                        0 {cint}
-                        1 
                       keys:
                         0 cint (type: int)
                         1 cint (type: int)
@@ -51,9 +48,6 @@ STAGE PLANS:
                     Map Join Operator
                       condition map:
                            Inner Join 0 to 1
-                      condition expressions:
-                        0 {cint}
-                        1 {cint}
                       keys:
                         0 cint (type: int)
                         1 cint (type: int)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_nested_mapjoin.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_nested_mapjoin.q.out?rev=1648280&r1=1648279&r2=1648280&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_nested_mapjoin.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_nested_mapjoin.q.out Mon Dec 29 02:13:57 2014
@@ -3,13 +3,12 @@ PREHOOK: type: QUERY
 POSTHOOK: query: explain select sum(t1.td) from (select  v1.csmallint as tsi, v1.cdouble as td from alltypesorc v1, alltypesorc v2 where v1.ctinyint=v2.ctinyint) t1 join alltypesorc v3 on t1.tsi=v3.csmallint
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-3 is a root stage
-  Stage-2 depends on stages: Stage-3
+  Stage-2 is a root stage
   Stage-1 depends on stages: Stage-2
   Stage-0 depends on stages: Stage-1
 
 STAGE PLANS:
-  Stage: Stage-3
+  Stage: Stage-2
     Spark
 #### A masked pattern was here ####
       Vertices:
@@ -21,98 +20,94 @@ STAGE PLANS:
                   Filter Operator
                     predicate: (ctinyint is not null and csmallint is not null) (type: boolean)
                     Statistics: Num rows: 3072 Data size: 94309 Basic stats: COMPLETE Column stats: NONE
-                    Spark HashTable Sink Operator
-                      condition expressions:
-                        0 {csmallint} {cdouble}
-                        1 {ctinyint}
-                      keys:
-                        0 ctinyint (type: tinyint)
-                        1 ctinyint (type: tinyint)
+                    Select Operator
+                      expressions: ctinyint (type: tinyint), csmallint (type: smallint), cdouble (type: double)
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 3072 Data size: 94309 Basic stats: COMPLETE Column stats: NONE
+                      Spark HashTable Sink Operator
+                        keys:
+                          0 _col0 (type: tinyint)
+                          1 _col0 (type: tinyint)
             Local Work:
               Map Reduce Local Work
-
-  Stage: Stage-2
-    Spark
-#### A masked pattern was here ####
-      Vertices:
-        Map 2 
+        Map 4 
             Map Operator Tree:
                 TableScan
-                  alias: v2
+                  alias: v1
                   Statistics: Num rows: 12288 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: ctinyint is not null (type: boolean)
+                    predicate: csmallint is not null (type: boolean)
                     Statistics: Num rows: 6144 Data size: 188618 Basic stats: COMPLETE Column stats: NONE
-                    Map Join Operator
-                      condition map:
-                           Inner Join 0 to 1
-                      condition expressions:
-                        0 {ctinyint} {csmallint} {cdouble}
-                        1 {ctinyint}
-                      keys:
-                        0 ctinyint (type: tinyint)
-                        1 ctinyint (type: tinyint)
-                      outputColumnNames: _col0, _col1, _col5, _col15
-                      input vertices:
-                        0 Map 1
-                      Statistics: Num rows: 6758 Data size: 207479 Basic stats: COMPLETE Column stats: NONE
-                      Filter Operator
-                        predicate: (_col0 = _col15) (type: boolean)
-                        Statistics: Num rows: 3379 Data size: 103739 Basic stats: COMPLETE Column stats: NONE
-                        Select Operator
-                          expressions: _col1 (type: smallint), _col5 (type: double)
-                          outputColumnNames: _col0, _col1
-                          Statistics: Num rows: 3379 Data size: 103739 Basic stats: COMPLETE Column stats: NONE
-                          Spark HashTable Sink Operator
-                            condition expressions:
-                              0 {_col1}
-                              1 
-                            keys:
-                              0 _col0 (type: smallint)
-                              1 csmallint (type: smallint)
+                    Select Operator
+                      expressions: csmallint (type: smallint)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 6144 Data size: 188618 Basic stats: COMPLETE Column stats: NONE
+                      Spark HashTable Sink Operator
+                        keys:
+                          0 _col0 (type: smallint)
+                          1 _col0 (type: smallint)
             Local Work:
               Map Reduce Local Work
 
   Stage: Stage-1
     Spark
       Edges:
-        Reducer 4 <- Map 3 (GROUP, 1)
+        Reducer 3 <- Map 2 (GROUP, 1)
 #### A masked pattern was here ####
       Vertices:
-        Map 3 
+        Map 2 
             Map Operator Tree:
                 TableScan
-                  alias: v3
+                  alias: v1
                   Statistics: Num rows: 12288 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: csmallint is not null (type: boolean)
+                    predicate: ctinyint is not null (type: boolean)
                     Statistics: Num rows: 6144 Data size: 188618 Basic stats: COMPLETE Column stats: NONE
-                    Map Join Operator
-                      condition map:
-                           Inner Join 0 to 1
-                      condition expressions:
-                        0 {_col1}
-                        1 
-                      keys:
-                        0 _col0 (type: smallint)
-                        1 csmallint (type: smallint)
-                      outputColumnNames: _col1
-                      input vertices:
-                        0 Map 2
-                      Statistics: Num rows: 6758 Data size: 207479 Basic stats: COMPLETE Column stats: NONE
-                      Group By Operator
-                        aggregations: sum(_col1)
-                        mode: hash
-                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                        Reduce Output Operator
-                          sort order: 
-                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                          value expressions: _col0 (type: double)
+                    Select Operator
+                      expressions: ctinyint (type: tinyint)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 6144 Data size: 188618 Basic stats: COMPLETE Column stats: NONE
+                      Map Join Operator
+                        condition map:
+                             Inner Join 0 to 1
+                        keys:
+                          0 _col0 (type: tinyint)
+                          1 _col0 (type: tinyint)
+                        outputColumnNames: _col1, _col2
+                        input vertices:
+                          0 Map 1
+                        Statistics: Num rows: 6758 Data size: 207479 Basic stats: COMPLETE Column stats: NONE
+                        Select Operator
+                          expressions: _col1 (type: smallint), _col2 (type: double)
+                          outputColumnNames: _col0, _col1
+                          Statistics: Num rows: 6758 Data size: 207479 Basic stats: COMPLETE Column stats: NONE
+                          Map Join Operator
+                            condition map:
+                                 Inner Join 0 to 1
+                            keys:
+                              0 _col0 (type: smallint)
+                              1 _col0 (type: smallint)
+                            outputColumnNames: _col1
+                            input vertices:
+                              1 Map 4
+                            Statistics: Num rows: 7433 Data size: 228226 Basic stats: COMPLETE Column stats: NONE
+                            Select Operator
+                              expressions: _col1 (type: double)
+                              outputColumnNames: _col0
+                              Statistics: Num rows: 7433 Data size: 228226 Basic stats: COMPLETE Column stats: NONE
+                              Group By Operator
+                                aggregations: sum(_col0)
+                                mode: hash
+                                outputColumnNames: _col0
+                                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                                Reduce Output Operator
+                                  sort order: 
+                                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                                  value expressions: _col0 (type: double)
             Local Work:
               Map Reduce Local Work
             Execution mode: vectorized
-        Reducer 4 
+        Reducer 3 
             Reduce Operator Tree:
               Group By Operator
                 aggregations: sum(VALUE._col0)
@@ -146,4 +141,4 @@ POSTHOOK: query: select sum(t1.td) from
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-6.06519093248863E11
+6.065190932486892E11

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out?rev=1648280&r1=1648279&r2=1648280&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out Mon Dec 29 02:13:57 2014
@@ -258,7 +258,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -278,7 +278,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -548,7 +548,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -568,7 +568,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -615,7 +615,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -635,7 +635,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -659,9 +659,9 @@ STAGE PLANS:
               Join Operator
                 condition map:
                      Inner Join 0 to 1
-                condition expressions:
-                  0 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7}
-                  1 
+                keys:
+                  0 p_partkey (type: int)
+                  1 p_partkey (type: int)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
                 Statistics: Num rows: 14 Data size: 8823 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
@@ -860,7 +860,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -880,7 +880,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -1117,7 +1117,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -1137,7 +1137,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -1405,7 +1405,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -1425,7 +1425,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -1703,7 +1703,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -1723,7 +1723,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -1770,12 +1770,12 @@ STAGE PLANS:
                 keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: int)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 13 Data size: 8021 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: string), _col1 (type: string)
                   sort order: ++
                   Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 13 Data size: 8021 Basic stats: COMPLETE Column stats: NONE
                   tag: -1
                   value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int)
                   auto parallelism: false
@@ -1784,19 +1784,19 @@ STAGE PLANS:
             Needs Tagging: false
             Reduce Operator Tree:
               Extract
-                Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 13 Data size: 8021 Basic stats: COMPLETE Column stats: NONE
                 PTF Operator
-                  Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 13 Data size: 8021 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: _col0 (type: string), _col1 (type: string), _col2 (type: int), _wcol0 (type: int), _wcol1 (type: int), _col2 (type: int), (_col2 - _wcol2) (type: int)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-                    Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 13 Data size: 8021 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false
                       GlobalTableId: 0
 #### A masked pattern was here ####
                       NumFilesPerFileSink: 1
-                      Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 13 Data size: 8021 Basic stats: COMPLETE Column stats: NONE
 #### A masked pattern was here ####
                       table:
                           input format: org.apache.hadoop.mapred.TextInputFormat
@@ -1967,7 +1967,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -1987,7 +1987,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -2033,7 +2033,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -2053,7 +2053,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -2100,9 +2100,9 @@ STAGE PLANS:
               Join Operator
                 condition map:
                      Inner Join 0 to 1
-                condition expressions:
-                  0 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7}
-                  1 
+                keys:
+                  0 _col0 (type: int)
+                  1 p_partkey (type: int)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
                 Statistics: Num rows: 14 Data size: 8823 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
@@ -2283,7 +2283,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -2303,7 +2303,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -2347,7 +2347,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -2367,7 +2367,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -2390,9 +2390,9 @@ STAGE PLANS:
               Join Operator
                 condition map:
                      Inner Join 0 to 1
-                condition expressions:
-                  0 
-                  1 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7}
+                keys:
+                  0 p_partkey (type: int)
+                  1 _col0 (type: int)
                 outputColumnNames: _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20
                 Statistics: Num rows: 14 Data size: 8823 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
@@ -2604,7 +2604,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -2624,7 +2624,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -2881,7 +2881,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -2901,7 +2901,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -3160,7 +3160,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -3180,7 +3180,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -3449,7 +3449,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -3469,7 +3469,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -3775,7 +3775,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -3795,7 +3795,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -4161,7 +4161,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -4181,7 +4181,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -4227,7 +4227,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -4247,7 +4247,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -4294,9 +4294,9 @@ STAGE PLANS:
               Join Operator
                 condition map:
                      Inner Join 0 to 1
-                condition expressions:
-                  0 {VALUE._col0} {VALUE._col1} {VALUE._col4} {VALUE._col6}
-                  1 
+                keys:
+                  0 _col0 (type: int)
+                  1 p_partkey (type: int)
                 outputColumnNames: _col1, _col2, _col5, _col7
                 Statistics: Num rows: 14 Data size: 8823 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
@@ -4487,7 +4487,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -4507,7 +4507,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -4769,7 +4769,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -4789,7 +4789,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -4815,12 +4815,12 @@ STAGE PLANS:
                 keys: KEY._col0 (type: string), KEY._col1 (type: string)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 13 Data size: 8021 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: string), _col0 (type: string)
                   sort order: ++
                   Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 13 Data size: 8021 Basic stats: COMPLETE Column stats: NONE
                   tag: -1
                   value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: double)
                   auto parallelism: false
@@ -4829,14 +4829,14 @@ STAGE PLANS:
             Needs Tagging: false
             Reduce Operator Tree:
               Extract
-                Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 13 Data size: 8021 Basic stats: COMPLETE Column stats: NONE
                 PTF Operator
-                  Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 13 Data size: 8021 Basic stats: COMPLETE Column stats: NONE
                   Reduce Output Operator
                     key expressions: _col0 (type: string), _col1 (type: string)
                     sort order: ++
                     Map-reduce partition columns: _col0 (type: string)
-                    Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 13 Data size: 8021 Basic stats: COMPLETE Column stats: NONE
                     tag: -1
                     value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: double)
                     auto parallelism: false
@@ -4844,19 +4844,19 @@ STAGE PLANS:
             Needs Tagging: false
             Reduce Operator Tree:
               Extract
-                Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 13 Data size: 8021 Basic stats: COMPLETE Column stats: NONE
                 PTF Operator
-                  Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 13 Data size: 8021 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: _col0 (type: string), _col1 (type: string), _col2 (type: double), _wcol0 (type: double)
                     outputColumnNames: _col0, _col1, _col2, _col3
-                    Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 13 Data size: 8021 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false
                       GlobalTableId: 0
 #### A masked pattern was here ####
                       NumFilesPerFileSink: 1
-                      Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 13 Data size: 8021 Basic stats: COMPLETE Column stats: NONE
 #### A masked pattern was here ####
                       table:
                           input format: org.apache.hadoop.mapred.TextInputFormat
@@ -5230,7 +5230,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -5250,7 +5250,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -5306,7 +5306,7 @@ STAGE PLANS:
                           properties:
                             bucket_count -1
                             columns p_mfgr,p_name,p_size,s2,r,dr,cud,fv1
-                            columns.comments 
+                            columns.comments 
                             columns.types string:string:int:int:int:int:double:int
 #### A masked pattern was here ####
                             name default.part_5
@@ -5343,7 +5343,7 @@ STAGE PLANS:
                           properties:
                             bucket_count -1
                             columns p_mfgr,p_name,p_size,r,dr,s
-                            columns.comments 
+                            columns.comments 
                             columns.types string:string:int:int:int:double
 #### A masked pattern was here ####
                             name default.part_4
@@ -5402,7 +5402,7 @@ STAGE PLANS:
               properties:
                 bucket_count -1
                 columns p_mfgr,p_name,p_size,s2,r,dr,cud,fv1
-                columns.comments 
+                columns.comments 
                 columns.types string:string:int:int:int:int:double:int
 #### A masked pattern was here ####
                 name default.part_5
@@ -5428,7 +5428,7 @@ STAGE PLANS:
               properties:
                 bucket_count -1
                 columns p_mfgr,p_name,p_size,r,dr,s
-                columns.comments 
+                columns.comments 
                 columns.types string:string:int:int:int:double
 #### A masked pattern was here ####
                 name default.part_4
@@ -5742,7 +5742,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -5762,7 +5762,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -6099,7 +6099,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -6119,7 +6119,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -6452,7 +6452,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -6472,7 +6472,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -6802,7 +6802,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -6822,7 +6822,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -7200,7 +7200,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -7220,7 +7220,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc
@@ -7549,7 +7549,7 @@ STAGE PLANS:
                     COLUMN_STATS_ACCURATE true
                     bucket_count -1
                     columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                    columns.comments 
+                    columns.comments 
                     columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                     name default.part_orc
@@ -7569,7 +7569,7 @@ STAGE PLANS:
                       COLUMN_STATS_ACCURATE true
                       bucket_count -1
                       columns p_partkey,p_name,p_mfgr,p_brand,p_type,p_size,p_container,p_retailprice,p_comment
-                      columns.comments 
+                      columns.comments 
                       columns.types int:string:string:string:string:int:string:double:string
 #### A masked pattern was here ####
                       name default.part_orc

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_shufflejoin.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_shufflejoin.q.out?rev=1648280&r1=1648279&r2=1648280&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_shufflejoin.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/vectorized_shufflejoin.q.out Mon Dec 29 02:13:57 2014
@@ -51,9 +51,9 @@ STAGE PLANS:
               Join Operator
                 condition map:
                      Inner Join 0 to 1
-                condition expressions:
-                  0 {KEY.reducesinkkey0}
-                  1 {KEY.reducesinkkey0}
+                keys:
+                  0 cint (type: int)
+                  1 cint (type: int)
                 outputColumnNames: _col2, _col17
                 Statistics: Num rows: 6758 Data size: 207479 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator