Posted to commits@hive.apache.org by jc...@apache.org on 2016/12/06 00:27:28 UTC

[1/7] hive git commit: HIVE-15251: Provide support for complex expressions in ON clauses for OUTER joins (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Repository: hive
Updated Branches:
  refs/heads/master 7089ac7b6 -> 9b2badc3e
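
For reference, a "complex expression" in an ON clause here means a join condition that goes beyond plain equality on the join keys, for example a disjunction mixing the equi-join predicate with single-table predicates. A minimal sketch, taken from one of the queries in the new smb_mapjoin_46.q.out golden file below (test1 and test2 are the test's own fixture tables):

    SELECT *
    FROM test1 LEFT OUTER JOIN test2
    ON (test1.value = test2.value
      OR test1.key BETWEEN 100 AND 102);

Conditions like this cannot be expressed purely as join keys; as the plans in the golden file show, the non-key part is carried as a residual filter predicate on the join operator.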


http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/ql/src/test/results/clientpositive/smb_mapjoin_46.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/smb_mapjoin_46.q.out b/ql/src/test/results/clientpositive/smb_mapjoin_46.q.out
new file mode 100644
index 0000000..df8b14a
--- /dev/null
+++ b/ql/src/test/results/clientpositive/smb_mapjoin_46.q.out
@@ -0,0 +1,1695 @@
+PREHOOK: query: CREATE TABLE aux1 (key INT, value INT, col_1 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@aux1
+POSTHOOK: query: CREATE TABLE aux1 (key INT, value INT, col_1 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@aux1
+PREHOOK: query: INSERT INTO aux1 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'),
+    (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2, 'Car')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@aux1
+POSTHOOK: query: INSERT INTO aux1 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'),
+    (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2, 'Car')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@aux1
+POSTHOOK: Lineage: aux1.col_1 SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: aux1.key EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: aux1.value EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: CREATE TABLE test1 (key INT, value INT, col_1 STRING) CLUSTERED BY (value) SORTED BY (value) INTO 2 BUCKETS
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test1
+POSTHOOK: query: CREATE TABLE test1 (key INT, value INT, col_1 STRING) CLUSTERED BY (value) SORTED BY (value) INTO 2 BUCKETS
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test1
+PREHOOK: query: INSERT OVERWRITE TABLE test1
+SELECT * FROM aux1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@aux1
+PREHOOK: Output: default@test1
+POSTHOOK: query: INSERT OVERWRITE TABLE test1
+SELECT * FROM aux1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@aux1
+POSTHOOK: Output: default@test1
+POSTHOOK: Lineage: test1.col_1 SIMPLE [(aux1)aux1.FieldSchema(name:col_1, type:string, comment:null), ]
+POSTHOOK: Lineage: test1.key SIMPLE [(aux1)aux1.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: test1.value SIMPLE [(aux1)aux1.FieldSchema(name:value, type:int, comment:null), ]
+PREHOOK: query: CREATE TABLE aux2 (key INT, value INT, col_2 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@aux2
+POSTHOOK: query: CREATE TABLE aux2 (key INT, value INT, col_2 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@aux2
+PREHOOK: query: INSERT INTO aux2 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
+    (104, 3, 'Fli'), (105, NULL, 'None')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__2
+PREHOOK: Output: default@aux2
+POSTHOOK: query: INSERT INTO aux2 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
+    (104, 3, 'Fli'), (105, NULL, 'None')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__2
+POSTHOOK: Output: default@aux2
+POSTHOOK: Lineage: aux2.col_2 SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: aux2.key EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: aux2.value EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: CREATE TABLE test2 (key INT, value INT, col_2 STRING) CLUSTERED BY (value) SORTED BY (value) INTO 2 BUCKETS
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test2
+POSTHOOK: query: CREATE TABLE test2 (key INT, value INT, col_2 STRING) CLUSTERED BY (value) SORTED BY (value) INTO 2 BUCKETS
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test2
+PREHOOK: query: INSERT OVERWRITE TABLE test2
+SELECT * FROM aux2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@aux2
+PREHOOK: Output: default@test2
+POSTHOOK: query: INSERT OVERWRITE TABLE test2
+SELECT * FROM aux2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@aux2
+POSTHOOK: Output: default@test2
+POSTHOOK: Lineage: test2.col_2 SIMPLE [(aux2)aux2.FieldSchema(name:col_2, type:string, comment:null), ]
+POSTHOOK: Lineage: test2.key SIMPLE [(aux2)aux2.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: test2.value SIMPLE [(aux2)aux2.FieldSchema(name:value, type:int, comment:null), ]
+PREHOOK: query: -- Basic outer join
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Basic outer join
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Sorted Merge Bucket Map Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                File Output Operator
+                  compressed: false
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+98	NULL	None	NULL	NULL	NULL
+NULL	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+99	2	Mat	103	2	Ema
+99	2	Mat	102	2	Del
+100	1	Bob	NULL	NULL	NULL
+PREHOOK: query: -- Conjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Conjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Sorted Merge Bucket Map Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                filter predicates:
+                  0 {_col0 BETWEEN 100 AND 102}
+                  1 
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                File Output Operator
+                  compressed: false
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+98	NULL	None	NULL	NULL	NULL
+NULL	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+101	2	Car	102	2	Del
+99	2	Mat	NULL	NULL	NULL
+100	1	Bob	NULL	NULL	NULL
+Warning: Map Join MAPJOIN[11][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: -- Conjunction with pred on single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Conjunction with pred on single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_1:test2 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_1:test2 
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: key BETWEEN 100 AND 102 (type: boolean)
+              Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: int), value (type: int), col_2 (type: string)
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                HashTable Sink Operator
+                  filter predicates:
+                    0 {_col0 BETWEEN 100 AND 102}
+                    1 
+                  keys:
+                    0 
+                    1 
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                filter predicates:
+                  0 {_col0 BETWEEN 100 AND 102}
+                  1 
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                Statistics: Num rows: 6 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 6 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[11][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+98	NULL	None	NULL	NULL	NULL
+NULL	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+101	2	Car	102	2	Del
+99	2	Mat	NULL	NULL	NULL
+100	1	Bob	102	2	Del
+PREHOOK: query: -- Conjunction with pred on multiple inputs and none (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Conjunction with pred on multiple inputs and none (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Sorted Merge Bucket Map Join Operator
+                condition map:
+                     Right Outer Join0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                File Output Operator
+                  compressed: false
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	NULL	105	NULL	None
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+99	2	Mat	103	2	Ema
+99	2	Mat	102	2	Del
+NULL	NULL	NULL	104	3	Fli
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: -- Condition on one input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Condition on one input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_1:test2 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_1:test2 
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                filter predicates:
+                  0 {_col0 BETWEEN 100 AND 102}
+                  1 
+                keys:
+                  0 
+                  1 
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                filter predicates:
+                  0 {_col0 BETWEEN 100 AND 102}
+                  1 
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+98	NULL	None	NULL	NULL	NULL
+NULL	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+101	2	Car	105	NULL	None
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+101	2	Car	104	3	Fli
+99	2	Mat	NULL	NULL	NULL
+100	1	Bob	105	NULL	None
+100	1	Bob	103	2	Ema
+100	1	Bob	102	2	Del
+100	1	Bob	104	3	Fli
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_1:test2 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_1:test2 
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 
+                  1 
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+98	NULL	None	102	2	Del
+NULL	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+101	2	Car	105	NULL	None
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+101	2	Car	104	3	Fli
+99	2	Mat	103	2	Ema
+99	2	Mat	102	2	Del
+100	1	Bob	105	NULL	None
+100	1	Bob	103	2	Ema
+100	1	Bob	102	2	Del
+100	1	Bob	104	3	Fli
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and left input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and left input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_1:test2 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_1:test2 
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 
+                  1 
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+98	NULL	None	NULL	NULL	NULL
+NULL	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+101	2	Car	105	NULL	None
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+101	2	Car	104	3	Fli
+99	2	Mat	103	2	Ema
+99	2	Mat	102	2	Del
+100	1	Bob	105	NULL	None
+100	1	Bob	103	2	Ema
+100	1	Bob	102	2	Del
+100	1	Bob	104	3	Fli
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and right input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and right input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_1:test2 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_1:test2 
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 
+                  1 
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+98	NULL	None	102	2	Del
+NULL	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+99	2	Mat	103	2	Ema
+99	2	Mat	102	2	Del
+100	1	Bob	102	2	Del
+PREHOOK: query: -- Keys plus residual (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Keys plus residual (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Sorted Merge Bucket Map Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                File Output Operator
+                  compressed: false
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+98	NULL	None	NULL	NULL	NULL
+NULL	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+99	2	Mat	102	2	Del
+100	1	Bob	NULL	NULL	NULL
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_0:test1 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_0:test1 
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 
+                  1 
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Right Outer Join0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+101	2	Car	105	NULL	None
+100	1	Bob	105	NULL	None
+101	2	Car	103	2	Ema
+99	2	Mat	103	2	Ema
+100	1	Bob	103	2	Ema
+98	NULL	None	102	2	Del
+NULL	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+101	2	Car	102	2	Del
+99	2	Mat	102	2	Del
+100	1	Bob	102	2	Del
+101	2	Car	104	3	Fli
+100	1	Bob	104	3	Fli
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and left input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and left input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_0:test1 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_0:test1 
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 
+                  1 
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Right Outer Join0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+101	2	Car	105	NULL	None
+100	1	Bob	105	NULL	None
+101	2	Car	103	2	Ema
+99	2	Mat	103	2	Ema
+100	1	Bob	103	2	Ema
+101	2	Car	102	2	Del
+99	2	Mat	102	2	Del
+100	1	Bob	102	2	Del
+101	2	Car	104	3	Fli
+100	1	Bob	104	3	Fli
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and right input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and right input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_0:test1 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_0:test1 
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 
+                  1 
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Right Outer Join0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	NULL	105	NULL	None
+101	2	Car	103	2	Ema
+99	2	Mat	103	2	Ema
+98	NULL	None	102	2	Del
+NULL	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+101	2	Car	102	2	Del
+99	2	Mat	102	2	Del
+100	1	Bob	102	2	Del
+NULL	NULL	NULL	104	3	Fli
+PREHOOK: query: -- Keys plus residual (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Keys plus residual (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Sorted Merge Bucket Map Join Operator
+                condition map:
+                     Right Outer Join0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                File Output Operator
+                  compressed: false
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	NULL	105	NULL	None
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+99	2	Mat	102	2	Del
+NULL	NULL	NULL	104	3	Fli
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Outer Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+100	1	Bob	102	2	Del
+100	1	Bob	103	2	Ema
+100	1	Bob	105	NULL	None
+100	1	Bob	104	3	Fli
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+101	2	Car	105	NULL	None
+101	2	Car	104	3	Fli
+99	0	Alice	102	2	Del
+NULL	NULL	None	102	2	Del
+98	NULL	None	102	2	Del
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and left input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and left input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Outer Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+100	1	Bob	102	2	Del
+100	1	Bob	103	2	Ema
+100	1	Bob	105	NULL	None
+100	1	Bob	104	3	Fli
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+101	2	Car	105	NULL	None
+101	2	Car	104	3	Fli
+99	0	Alice	NULL	NULL	NULL
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and right input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and right input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Outer Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+100	1	Bob	102	2	Del
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+99	0	Alice	102	2	Del
+NULL	NULL	None	102	2	Del
+98	NULL	None	102	2	Del
+NULL	NULL	NULL	105	NULL	None
+NULL	NULL	NULL	104	3	Fli
+PREHOOK: query: -- Keys plus residual (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Keys plus residual (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: int)
+                sort order: +
+                Map-reduce partition columns: _col1 (type: int)
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: int)
+                sort order: +
+                Map-reduce partition columns: _col1 (type: int)
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Outer Join 0 to 1
+          keys:
+            0 _col1 (type: int)
+            1 _col1 (type: int)
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+NULL	NULL	NULL	105	NULL	None
+99	0	Alice	NULL	NULL	NULL
+100	1	Bob	NULL	NULL	NULL
+99	2	Mat	102	2	Del
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+NULL	NULL	NULL	104	3	Fli


[3/7] hive git commit: HIVE-15251: Provide support for complex expressions in ON clauses for OUTER joins (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by jc...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/ql/src/test/results/clientpositive/llap/vectorized_join46.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorized_join46.q.out b/ql/src/test/results/clientpositive/llap/vectorized_join46.q.out
new file mode 100644
index 0000000..d8d7748
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/vectorized_join46.q.out
@@ -0,0 +1,1867 @@
+PREHOOK: query: CREATE TABLE test1 (key INT, value INT, col_1 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test1
+POSTHOOK: query: CREATE TABLE test1 (key INT, value INT, col_1 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test1
+PREHOOK: query: INSERT INTO test1 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'),
+    (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2, 'Car')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@test1
+POSTHOOK: query: INSERT INTO test1 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'),
+    (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2, 'Car')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@test1
+POSTHOOK: Lineage: test1.col_1 SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: test1.key EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: test1.value EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: CREATE TABLE test2 (key INT, value INT, col_2 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test2
+POSTHOOK: query: CREATE TABLE test2 (key INT, value INT, col_2 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test2
+PREHOOK: query: INSERT INTO test2 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
+    (104, 3, 'Fli'), (105, NULL, 'None')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__2
+PREHOOK: Output: default@test2
+POSTHOOK: query: INSERT INTO test2 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
+    (104, 3, 'Fli'), (105, NULL, 'None')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__2
+POSTHOOK: Output: default@test2
+POSTHOOK: Lineage: test2.col_2 SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: test2.key EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: test2.value EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: -- Basic outer join
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Basic outer join
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 2 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Left Outer Join0 to 1
+                      keys:
+                        0 _col1 (type: int)
+                        1 _col1 (type: int)
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        1 Map 2
+                      Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	NULL	NULL	NULL
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+PREHOOK: query: -- Conjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Conjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 2 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Left Outer Join0 to 1
+                      filter predicates:
+                        0 {_col0 BETWEEN 100 AND 102}
+                        1 
+                      keys:
+                        0 _col1 (type: int)
+                        1 _col1 (type: int)
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        1 Map 2
+                      Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: key BETWEEN 100 AND 102 (type: boolean)
+                    Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: int), value (type: int), col_2 (type: string)
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col1 (type: int)
+                        sort order: +
+                        Map-reduce partition columns: _col1 (type: int)
+                        Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	NULL	NULL	NULL
+100	1	Bob	NULL	NULL	NULL
+101	2	Car	102	2	Del
+Warning: Map Join MAPJOIN[11][bigTable=?] in task 'Map 1' is a cross product
+PREHOOK: query: -- Conjunction with pred on single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Conjunction with pred on single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 2 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Left Outer Join0 to 1
+                      filter predicates:
+                        0 {_col0 BETWEEN 100 AND 102}
+                        1 
+                      keys:
+                        0 
+                        1 
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        1 Map 2
+                      Statistics: Num rows: 6 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 6 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: key BETWEEN 100 AND 102 (type: boolean)
+                    Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: int), value (type: int), col_2 (type: string)
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[11][bigTable=?] in task 'Map 1' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	NULL	NULL	NULL
+100	1	Bob	102	2	Del
+101	2	Car	102	2	Del
+PREHOOK: query: -- Conjunction with pred on multiple inputs and none (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Conjunction with pred on multiple inputs and none (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 2 <- Map 1 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Right Outer Join0 to 1
+                      keys:
+                        0 _col1 (type: int)
+                        1 _col1 (type: int)
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        0 Map 1
+                      Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+99	2	Mat	102	2	Del
+101	2	Car	102	2	Del
+99	2	Mat	103	2	Ema
+101	2	Car	103	2	Ema
+NULL	NULL	NULL	104	3	Fli
+NULL	NULL	NULL	105	NULL	None
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
+PREHOOK: query: -- Condition on one input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Condition on one input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 2 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Left Outer Join0 to 1
+                      filter predicates:
+                        0 {_col0 BETWEEN 100 AND 102}
+                        1 
+                      keys:
+                        0 
+                        1 
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        1 Map 2
+                      Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	NULL	NULL	NULL
+100	1	Bob	102	2	Del
+100	1	Bob	105	NULL	None
+100	1	Bob	104	3	Fli
+100	1	Bob	103	2	Ema
+101	2	Car	102	2	Del
+101	2	Car	105	NULL	None
+101	2	Car	104	3	Fli
+101	2	Car	103	2	Ema
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 2 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Left Outer Join0 to 1
+                      keys:
+                        0 
+                        1 
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        1 Map 2
+                      residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                      Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+98	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+100	1	Bob	105	NULL	None
+100	1	Bob	104	3	Fli
+100	1	Bob	103	2	Ema
+101	2	Car	102	2	Del
+101	2	Car	105	NULL	None
+101	2	Car	104	3	Fli
+101	2	Car	103	2	Ema
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and left input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and left input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 2 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Left Outer Join0 to 1
+                      keys:
+                        0 
+                        1 
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        1 Map 2
+                      residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
+                      Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+100	1	Bob	105	NULL	None
+100	1	Bob	104	3	Fli
+100	1	Bob	103	2	Ema
+101	2	Car	102	2	Del
+101	2	Car	105	NULL	None
+101	2	Car	104	3	Fli
+101	2	Car	103	2	Ema
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and right input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and right input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 2 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Left Outer Join0 to 1
+                      keys:
+                        0 
+                        1 
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        1 Map 2
+                      residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+                      Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+98	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+PREHOOK: query: -- Keys plus residual (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Keys plus residual (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 2 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Left Outer Join0 to 1
+                      keys:
+                        0 _col1 (type: int)
+                        1 _col1 (type: int)
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        1 Map 2
+                      residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                      Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	102	2	Del
+100	1	Bob	NULL	NULL	NULL
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 2 <- Map 1 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Right Outer Join0 to 1
+                      keys:
+                        0 
+                        1 
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        0 Map 1
+                      residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                      Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+101	2	Car	102	2	Del
+100	1	Bob	102	2	Del
+99	2	Mat	102	2	Del
+99	0	Alice	102	2	Del
+98	NULL	None	102	2	Del
+101	2	Car	103	2	Ema
+100	1	Bob	103	2	Ema
+99	2	Mat	103	2	Ema
+101	2	Car	104	3	Fli
+100	1	Bob	104	3	Fli
+101	2	Car	105	NULL	None
+100	1	Bob	105	NULL	None
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and left input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and left input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 2 <- Map 1 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Right Outer Join0 to 1
+                      keys:
+                        0 
+                        1 
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        0 Map 1
+                      residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
+                      Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+101	2	Car	102	2	Del
+100	1	Bob	102	2	Del
+99	2	Mat	102	2	Del
+101	2	Car	103	2	Ema
+100	1	Bob	103	2	Ema
+99	2	Mat	103	2	Ema
+101	2	Car	104	3	Fli
+100	1	Bob	104	3	Fli
+101	2	Car	105	NULL	None
+100	1	Bob	105	NULL	None
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and right input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and right input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 2 <- Map 1 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Right Outer Join0 to 1
+                      keys:
+                        0 
+                        1 
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        0 Map 1
+                      residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+                      Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+101	2	Car	102	2	Del
+100	1	Bob	102	2	Del
+99	2	Mat	102	2	Del
+99	0	Alice	102	2	Del
+98	NULL	None	102	2	Del
+101	2	Car	103	2	Ema
+99	2	Mat	103	2	Ema
+NULL	NULL	NULL	104	3	Fli
+NULL	NULL	NULL	105	NULL	None
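Note on the plans above: the ON clause is a pure disjunction, so no equality key can be extracted. The join therefore runs as a cross product (hence the warning) and the whole condition is evaluated as the join operator's "residual filter predicates". The matched pairs are what the sketch below computes; because the join is RIGHT OUTER, right rows with no surviving pair (104/Fli and 105/None above) additionally come back padded with NULLs on the left.

-- Matched part only, sketched on the test tables themselves:
SELECT test1.*, test2.*
FROM test1 CROSS JOIN test2
WHERE test1.value = test2.value
   OR test2.key BETWEEN 100 AND 102;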
+PREHOOK: query: -- Keys plus residual (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Keys plus residual (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 2 <- Map 1 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Right Outer Join0 to 1
+                      keys:
+                        0 _col1 (type: int)
+                        1 _col1 (type: int)
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        0 Map 1
+                      residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                      Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+99	2	Mat	102	2	Del
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+NULL	NULL	NULL	104	3	Fli
+NULL	NULL	NULL	105	NULL	None
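Here the planner can split the ON clause: the equality becomes the join key (keys: _col1 on both sides) and the BETWEEN disjunction is kept as the residual filter, so no cross product is needed. The residual has to stay in the ON clause; for an outer join it is not equivalent to a WHERE predicate, which is evaluated after the join and filters NULL-padded rows like any others. A minimal sketch of the non-equivalent form, on the same tables:

SELECT *
FROM test1 RIGHT OUTER JOIN test2
ON (test1.value = test2.value)
WHERE test1.key BETWEEN 100 AND 102
   OR test2.key BETWEEN 100 AND 102;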
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Outer Join 0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+98	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+100	1	Bob	103	2	Ema
+100	1	Bob	104	3	Fli
+100	1	Bob	105	NULL	None
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+101	2	Car	104	3	Fli
+101	2	Car	105	NULL	None
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and left input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and left input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Outer Join 0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+100	1	Bob	103	2	Ema
+100	1	Bob	104	3	Fli
+100	1	Bob	105	NULL	None
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+101	2	Car	104	3	Fli
+101	2	Car	105	NULL	None
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and right input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and right input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Outer Join 0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+98	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+NULL	NULL	NULL	104	3	Fli
+NULL	NULL	NULL	105	NULL	None
+PREHOOK: query: -- Keys plus residual (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Keys plus residual (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Outer Join 0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	NULL	105	NULL	None
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+100	1	Bob	NULL	NULL	NULL
+99	2	Mat	102	2	Del
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+NULL	NULL	NULL	104	3	Fli
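In the FULL OUTER case neither side is preserved exclusively: pairs rejected by the residual contribute nothing to the matched part, and every row from either table that ends up with no surviving pair is emitted once, padded with NULLs on the other side (e.g. Alice and Bob on the left, 104/Fli and 105/None on the right above). A quick probe for just the padded rows, assuming, as in this data, that col_1 and col_2 are never NULL:

SELECT *
FROM test1 FULL OUTER JOIN test2
ON (test1.value = test2.value
  AND (test1.key BETWEEN 100 AND 102
    OR test2.key BETWEEN 100 AND 102))
WHERE test1.col_1 IS NULL
   OR test2.col_2 IS NULL;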


[6/7] hive git commit: HIVE-15251: Provide support for complex expressions in ON clauses for OUTER joins (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by jc...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/ql/src/test/results/clientpositive/join46.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join46.q.out b/ql/src/test/results/clientpositive/join46.q.out
new file mode 100644
index 0000000..90f0620
--- /dev/null
+++ b/ql/src/test/results/clientpositive/join46.q.out
@@ -0,0 +1,1948 @@
+PREHOOK: query: CREATE TABLE test1 (key INT, value INT, col_1 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test1
+POSTHOOK: query: CREATE TABLE test1 (key INT, value INT, col_1 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test1
+PREHOOK: query: INSERT INTO test1 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'),
+    (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2, 'Car')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@test1
+POSTHOOK: query: INSERT INTO test1 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'),
+    (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2, 'Car')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@test1
+POSTHOOK: Lineage: test1.col_1 SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: test1.key EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: test1.value EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: CREATE TABLE test2 (key INT, value INT, col_2 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test2
+POSTHOOK: query: CREATE TABLE test2 (key INT, value INT, col_2 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test2
+PREHOOK: query: INSERT INTO test2 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
+    (104, 3, 'Fli'), (105, NULL, 'None')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__2
+PREHOOK: Output: default@test2
+POSTHOOK: query: INSERT INTO test2 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
+    (104, 3, 'Fli'), (105, NULL, 'None')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__2
+POSTHOOK: Output: default@test2
+POSTHOOK: Lineage: test2.col_2 SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: test2.key EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: test2.value EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: -- Basic outer join
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Basic outer join
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: int)
+                sort order: +
+                Map-reduce partition columns: _col1 (type: int)
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: int)
+                sort order: +
+                Map-reduce partition columns: _col1 (type: int)
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Left Outer Join0 to 1
+          keys:
+            0 _col1 (type: int)
+            1 _col1 (type: int)
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+98	NULL	None	NULL	NULL	NULL
+NULL	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+100	1	Bob	NULL	NULL	NULL
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+99	2	Mat	103	2	Ema
+99	2	Mat	102	2	Del
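Baseline for the cases that follow: with a plain equality, value is used directly as the join key for both inputs and there is no residual filter. SQL equality never treats NULL as equal to NULL, so the two left rows whose value is NULL only show up NULL-padded above (Alice is unmatched simply because no right row has value 0). A small sanity check of that rule on the same tables:

-- returns 0: a NULL join key can never satisfy the equality
SELECT count(*)
FROM test1 JOIN test2 ON (test1.value = test2.value)
WHERE test1.value IS NULL;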
+PREHOOK: query: -- Conjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Conjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: int)
+                sort order: +
+                Map-reduce partition columns: _col1 (type: int)
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: key BETWEEN 100 AND 102 (type: boolean)
+              Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: int), value (type: int), col_2 (type: string)
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col1 (type: int)
+                  sort order: +
+                  Map-reduce partition columns: _col1 (type: int)
+                  Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Left Outer Join0 to 1
+          filter predicates:
+            0 {VALUE._col0 BETWEEN 100 AND 102}
+            1 
+          keys:
+            0 _col1 (type: int)
+            1 _col1 (type: int)
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+98	NULL	None	NULL	NULL	NULL
+NULL	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+100	1	Bob	NULL	NULL	NULL
+101	2	Car	102	2	Del
+99	2	Mat	NULL	NULL	NULL
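The plan above shows the pushdown asymmetry for outer joins: the predicate on test2, the non-preserved side, is pushed below the join into a Filter Operator under its TableScan, while the predicate on test1, the preserved side, stays at the join as a "filter predicates" entry so that out-of-range left rows are still emitted NULL-padded rather than dropped. Filtering the preserved side before the join by hand is therefore not equivalent:

-- Drops the out-of-range left rows entirely instead of padding them with NULLs
SELECT *
FROM (SELECT * FROM test1 WHERE key BETWEEN 100 AND 102) t1
LEFT OUTER JOIN test2
ON (t1.value = test2.value AND test2.key BETWEEN 100 AND 102);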
+Warning: Shuffle Join JOIN[7][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: -- Conjunction with pred on single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Conjunction with pred on single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: key BETWEEN 100 AND 102 (type: boolean)
+              Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: int), value (type: int), col_2 (type: string)
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Left Outer Join0 to 1
+          filter predicates:
+            0 {VALUE._col0 BETWEEN 100 AND 102}
+            1 
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          Statistics: Num rows: 6 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 6 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[7][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+101	2	Car	102	2	Del
+100	1	Bob	102	2	Del
+99	2	Mat	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+NULL	NULL	None	NULL	NULL	NULL
+PREHOOK: query: -- Conjunction with pred on multiple inputs and none (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Conjunction with pred on multiple inputs and none (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: int)
+                sort order: +
+                Map-reduce partition columns: _col1 (type: int)
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: int)
+                sort order: +
+                Map-reduce partition columns: _col1 (type: int)
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Right Outer Join0 to 1
+          keys:
+            0 _col1 (type: int)
+            1 _col1 (type: int)
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	NULL	105	NULL	None
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+99	2	Mat	103	2	Ema
+99	2	Mat	102	2	Del
+NULL	NULL	NULL	104	3	Fli
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: -- Condition on one input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Condition on one input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Left Outer Join0 to 1
+          filter predicates:
+            0 {VALUE._col0 BETWEEN 100 AND 102}
+            1 
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+101	2	Car	105	NULL	None
+101	2	Car	104	3	Fli
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+100	1	Bob	105	NULL	None
+100	1	Bob	104	3	Fli
+100	1	Bob	103	2	Ema
+100	1	Bob	102	2	Del
+99	2	Mat	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+NULL	NULL	None	NULL	NULL	NULL
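When the ON clause mentions only the left input there is again no key to extract; the condition becomes a filter predicate on side 0 of a cross-product join, so each left row in the range pairs with all four right rows and every other left row (including the one with a NULL key) comes back once, NULL-padded. Up to row order, the output above matches this hand-written decomposition, with explicit casts so the UNION ALL branches line up:

SELECT test1.*, test2.*
FROM test1 CROSS JOIN test2
WHERE test1.key BETWEEN 100 AND 102
UNION ALL
SELECT test1.*,
       CAST(NULL AS INT), CAST(NULL AS INT), CAST(NULL AS STRING)
FROM test1
WHERE NOT (test1.key BETWEEN 100 AND 102) OR test1.key IS NULL;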
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Left Outer Join0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+101	2	Car	105	NULL	None
+101	2	Car	104	3	Fli
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+100	1	Bob	105	NULL	None
+100	1	Bob	104	3	Fli
+100	1	Bob	103	2	Ema
+100	1	Bob	102	2	Del
+99	2	Mat	103	2	Ema
+99	2	Mat	102	2	Del
+99	0	Alice	102	2	Del
+98	NULL	None	102	2	Del
+NULL	NULL	None	102	2	Del
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and left input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and left input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Left Outer Join0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+101	2	Car	105	NULL	None
+101	2	Car	104	3	Fli
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+100	1	Bob	105	NULL	None
+100	1	Bob	104	3	Fli
+100	1	Bob	103	2	Ema
+100	1	Bob	102	2	Del
+99	2	Mat	103	2	Ema
+99	2	Mat	102	2	Del
+99	0	Alice	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+NULL	NULL	None	NULL	NULL	NULL
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and right input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and right input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Left Outer Join0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+100	1	Bob	102	2	Del
+99	2	Mat	103	2	Ema
+99	2	Mat	102	2	Del
+99	0	Alice	102	2	Del
+98	NULL	None	102	2	Del
+NULL	NULL	None	102	2	Del
+PREHOOK: query: -- Keys plus residual (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Keys plus residual (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: int)
+                sort order: +
+                Map-reduce partition columns: _col1 (type: int)
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: int)
+                sort order: +
+                Map-reduce partition columns: _col1 (type: int)
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Left Outer Join0 to 1
+          keys:
+            0 _col1 (type: int)
+            1 _col1 (type: int)
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+98	NULL	None	NULL	NULL	NULL
+NULL	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+100	1	Bob	NULL	NULL	NULL
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+99	2	Mat	102	2	Del
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Right Outer Join0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+101	2	Car	105	NULL	None
+101	2	Car	104	3	Fli
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+100	1	Bob	105	NULL	None
+100	1	Bob	104	3	Fli
+100	1	Bob	103	2	Ema
+100	1	Bob	102	2	Del
+99	2	Mat	103	2	Ema
+99	2	Mat	102	2	Del
+99	0	Alice	102	2	Del
+98	NULL	None	102	2	Del
+NULL	NULL	None	102	2	Del
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and left input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and left input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Right Outer Join0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+101	2	Car	105	NULL	None
+101	2	Car	104	3	Fli
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+100	1	Bob	105	NULL	None
+100	1	Bob	104	3	Fli
+100	1	Bob	103	2	Ema
+100	1	Bob	102	2	Del
+99	2	Mat	103	2	Ema
+99	2	Mat	102	2	Del
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and right input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and right input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Right Outer Join0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+100	1	Bob	102	2	Del
+99	2	Mat	103	2	Ema
+99	2	Mat	102	2	Del
+99	0	Alice	102	2	Del
+98	NULL	None	102	2	Del
+NULL	NULL	None	102	2	Del
+NULL	NULL	NULL	105	NULL	None
+NULL	NULL	NULL	104	3	Fli
+PREHOOK: query: -- Keys plus residual (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Keys plus residual (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: int)
+                sort order: +
+                Map-reduce partition columns: _col1 (type: int)
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: int)
+                sort order: +
+                Map-reduce partition columns: _col1 (type: int)
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Right Outer Join0 to 1
+          keys:
+            0 _col1 (type: int)
+            1 _col1 (type: int)
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	NULL	105	NULL	None
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+99	2	Mat	102	2	Del
+NULL	NULL	NULL	104	3	Fli
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Outer Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+101	2	Car	105	NULL	None
+101	2	Car	104	3	Fli
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+100	1	Bob	105	NULL	None
+100	1	Bob	104	3	Fli
+100	1	Bob	103	2	Ema
+100	1	Bob	102	2	Del
+99	2	Mat	103	2	Ema
+99	2	Mat	102	2	Del
+99	0	Alice	102	2	Del
+98	NULL	None	102	2	Del
+NULL	NULL	None	102	2	Del
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and left input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and left input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Outer Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+101	2	Car	105	NULL	None
+101	2	Car	104	3	Fli
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+100	1	Bob	105	NULL	None
+100	1	Bob	104	3	Fli
+100	1	Bob	103	2	Ema
+100	1	Bob	102	2	Del
+99	2	Mat	103	2	Ema
+99	2	Mat	102	2	Del
+99	0	Alice	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+NULL	NULL	None	NULL	NULL	NULL
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and right input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and right input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Outer Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+100	1	Bob	102	2	Del
+99	2	Mat	103	2	Ema
+99	2	Mat	102	2	Del
+99	0	Alice	102	2	Del
+98	NULL	None	102	2	Del
+NULL	NULL	None	102	2	Del
+NULL	NULL	NULL	105	NULL	None
+NULL	NULL	NULL	104	3	Fli
+PREHOOK: query: -- Keys plus residual (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Keys plus residual (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: int)
+                sort order: +
+                Map-reduce partition columns: _col1 (type: int)
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: int)
+                sort order: +
+                Map-reduce partition columns: _col1 (type: int)
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Outer Join 0 to 1
+          keys:
+            0 _col1 (type: int)
+            1 _col1 (type: int)
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+98	NULL	None	NULL	NULL	NULL
+NULL	NULL	None	NULL	NULL	NULL
+NULL	NULL	NULL	105	NULL	None
+99	0	Alice	NULL	NULL	NULL
+100	1	Bob	NULL	NULL	NULL
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+99	2	Mat	102	2	Del
+NULL	NULL	NULL	104	3	Fli
+Warning: Shuffle Join JOIN[17][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Stage-2:MAPRED' is a cross product
+PREHOOK: query: -- Mixed ( FOJ (ROJ, LOJ) ) 
+EXPLAIN
+SELECT *
+FROM (
+  SELECT test1.key AS key1, test1.value AS value1, test1.col_1 AS col_1,
+         test2.key AS key2, test2.value AS value2, test2.col_2 AS col_2
+  FROM test1 RIGHT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1.key AS key3, test1.value AS value3, test1.col_1 AS col_3,
+         test2.key AS key4, test2.value AS value4, test2.col_2 AS col_4
+  FROM test1 LEFT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Mixed ( FOJ (ROJ, LOJ) ) 
+EXPLAIN
+SELECT *
+FROM (
+  SELECT test1.key AS key1, test1.value AS value1, test1.col_1 AS col_1,
+         test2.key AS key2, test2.value AS value2, test2.col_2 AS col_2
+  FROM test1 RIGHT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1.key AS key3, test1.value AS value3, test1.col_1 AS col_3,
+         test2.key AS key4, test2.value AS value4, test2.col_2 AS col_4
+  FROM test1 LEFT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1, Stage-4
+  Stage-4 is a root stage
+  Stage-0 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: int)
+                sort order: +
+                Map-reduce partition columns: _col1 (type: int)
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: int)
+                sort order: +
+                Map-reduce partition columns: _col1 (type: int)
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Right Outer Join0 to 1
+          keys:
+            0 _col1 (type: int)
+            1 _col1 (type: int)
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Reduce Output Operator
+              sort order: 
+              Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+              value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string), _col3 (type: int), _col4 (type: int), _col5 (type: string)
+          TableScan
+            Reduce Output Operator
+              sort order: 
+              Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+              value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string), _col3 (type: int), _col4 (type: int), _col5 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Outer Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
+          residual filter predicates: {(_col1 is null or (_col10 is null and (_col7 <> _col4)))}
+          Statistics: Num rows: 36 Data size: 768 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 36 Data size: 768 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-4
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: int)
+                sort order: +
+                Map-reduce partition columns: _col1 (type: int)
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: int)
+                sort order: +
+                Map-reduce partition columns: _col1 (type: int)
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Left Outer Join0 to 1
+          keys:
+            0 _col1 (type: int)
+            1 _col1 (type: int)
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[17][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Stage-2:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM (
+  SELECT test1.key AS key1, test1.value AS value1, test1.col_1 AS col_1,
+         test2.key AS key2, test2.value AS value2, test2.col_2 AS col_2
+  FROM test1 RIGHT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1.key AS key3, test1.value AS value3, test1.col_1 AS col_3,
+         test2.key AS key4, test2.value AS value4, test2.col_2 AS col_4
+  FROM test1 LEFT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM (
+  SELECT test1.key AS key1, test1.value AS value1, test1.col_1 AS col_1,
+         test2.key AS key2, test2.value AS value2, test2.col_2 AS col_2
+  FROM test1 RIGHT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1.key AS key3, test1.value AS value3, test1.col_1 AS col_3,
+         test2.key AS key4, test2.value AS value4, test2.col_2 AS col_4
+  FROM test1 LEFT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	NULL	104	3	Fli	99	2	Mat	102	2	Del
+NULL	NULL	NULL	104	3	Fli	101	2	Car	102	2	Del
+NULL	NULL	NULL	104	3	Fli	101	2	Car	103	2	Ema
+NULL	NULL	NULL	104	3	Fli	100	1	Bob	NULL	NULL	NULL
+NULL	NULL	NULL	104	3	Fli	99	0	Alice	NULL	NULL	NULL
+NULL	NULL	NULL	104	3	Fli	NULL	NULL	None	NULL	NULL	NULL
+NULL	NULL	NULL	104	3	Fli	98	NULL	None	NULL	NULL	NULL
+99	2	Mat	102	2	Del	100	1	Bob	NULL	NULL	NULL
+99	2	Mat	102	2	Del	99	0	Alice	NULL	NULL	NULL
+101	2	Car	102	2	Del	100	1	Bob	NULL	NULL	NULL
+101	2	Car	102	2	Del	99	0	Alice	NULL	NULL	NULL
+101	2	Car	103	2	Ema	100	1	Bob	NULL	NULL	NULL
+101	2	Car	103	2	Ema	99	0	Alice	NULL	NULL	NULL
+NULL	NULL	NULL	105	NULL	None	99	2	Mat	102	2	Del
+NULL	NULL	NULL	105	NULL	None	101	2	Car	102	2	Del
+NULL	NULL	NULL	105	NULL	None	101	2	Car	103	2	Ema
+NULL	NULL	NULL	105	NULL	None	100	1	Bob	NULL	NULL	NULL
+NULL	NULL	NULL	105	NULL	None	99	0	Alice	NULL	NULL	NULL
+NULL	NULL	NULL	105	NULL	None	NULL	NULL	None	NULL	NULL	NULL
+NULL	NULL	NULL	105	NULL	None	98	NULL	None	NULL	NULL	NULL


[5/7] hive git commit: HIVE-15251: Provide support for complex expressions in ON clauses for OUTER joins (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by jc...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/ql/src/test/results/clientpositive/llap/join46.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/join46.q.out b/ql/src/test/results/clientpositive/llap/join46.q.out
new file mode 100644
index 0000000..fe1cf37
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/join46.q.out
@@ -0,0 +1,2190 @@
+PREHOOK: query: CREATE TABLE test1 (key INT, value INT, col_1 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test1
+POSTHOOK: query: CREATE TABLE test1 (key INT, value INT, col_1 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test1
+PREHOOK: query: INSERT INTO test1 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'),
+    (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2, 'Car')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@test1
+POSTHOOK: query: INSERT INTO test1 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'),
+    (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2, 'Car')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@test1
+POSTHOOK: Lineage: test1.col_1 SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: test1.key EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: test1.value EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: CREATE TABLE test2 (key INT, value INT, col_2 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test2
+POSTHOOK: query: CREATE TABLE test2 (key INT, value INT, col_2 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test2
+PREHOOK: query: INSERT INTO test2 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
+    (104, 3, 'Fli'), (105, NULL, 'None')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__2
+PREHOOK: Output: default@test2
+POSTHOOK: query: INSERT INTO test2 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
+    (104, 3, 'Fli'), (105, NULL, 'None')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__2
+POSTHOOK: Output: default@test2
+POSTHOOK: Lineage: test2.col_2 SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: test2.key EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: test2.value EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: -- Basic outer join
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Basic outer join
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+100	1	Bob	NULL	NULL	NULL
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+PREHOOK: query: -- Conjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Conjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: key BETWEEN 100 AND 102 (type: boolean)
+                    Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: int), value (type: int), col_2 (type: string)
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col1 (type: int)
+                        sort order: +
+                        Map-reduce partition columns: _col1 (type: int)
+                        Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                filter predicates:
+                  0 {VALUE._col0 BETWEEN 100 AND 102}
+                  1 
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+100	1	Bob	NULL	NULL	NULL
+99	2	Mat	NULL	NULL	NULL
+101	2	Car	102	2	Del
+Warning: Shuffle Join MERGEJOIN[11][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- Conjunction with pred on single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Conjunction with pred on single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: key BETWEEN 100 AND 102 (type: boolean)
+                    Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: int), value (type: int), col_2 (type: string)
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                filter predicates:
+                  0 {VALUE._col0 BETWEEN 100 AND 102}
+                  1 
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                Statistics: Num rows: 6 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 6 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[11][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	NULL	NULL	NULL
+100	1	Bob	102	2	Del
+101	2	Car	102	2	Del
+PREHOOK: query: -- Conjunction with pred on multiple inputs and none (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Conjunction with pred on multiple inputs and none (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Right Outer Join0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	NULL	105	NULL	None
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+NULL	NULL	NULL	104	3	Fli
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- Condition on one input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Condition on one input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                filter predicates:
+                  0 {VALUE._col0 BETWEEN 100 AND 102}
+                  1 
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	NULL	NULL	NULL
+100	1	Bob	102	2	Del
+100	1	Bob	103	2	Ema
+100	1	Bob	104	3	Fli
+100	1	Bob	105	NULL	None
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+101	2	Car	104	3	Fli
+101	2	Car	105	NULL	None
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+98	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+100	1	Bob	103	2	Ema
+100	1	Bob	104	3	Fli
+100	1	Bob	105	NULL	None
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+101	2	Car	104	3	Fli
+101	2	Car	105	NULL	None
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and left input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and left input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+100	1	Bob	103	2	Ema
+100	1	Bob	104	3	Fli
+100	1	Bob	105	NULL	None
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+101	2	Car	104	3	Fli
+101	2	Car	105	NULL	None
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and right input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and right input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+98	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+PREHOOK: query: -- Keys plus residual (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Keys plus residual (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+100	1	Bob	NULL	NULL	NULL
+99	2	Mat	102	2	Del
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Right Outer Join0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+98	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+100	1	Bob	103	2	Ema
+100	1	Bob	104	3	Fli
+100	1	Bob	105	NULL	None
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+101	2	Car	104	3	Fli
+101	2	Car	105	NULL	None
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and left input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and left input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Right Outer Join0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+100	1	Bob	103	2	Ema
+100	1	Bob	104	3	Fli
+100	1	Bob	105	NULL	None
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+101	2	Car	104	3	Fli
+101	2	Car	105	NULL	None
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and right input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and right input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Right Outer Join0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+98	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+NULL	NULL	NULL	104	3	Fli
+NULL	NULL	NULL	105	NULL	None
+PREHOOK: query: -- Keys plus residual (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Keys plus residual (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Right Outer Join0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	NULL	105	NULL	None
+99	2	Mat	102	2	Del
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+NULL	NULL	NULL	104	3	Fli
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Outer Join 0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+98	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+100	1	Bob	103	2	Ema
+100	1	Bob	104	3	Fli
+100	1	Bob	105	NULL	None
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+101	2	Car	104	3	Fli
+101	2	Car	105	NULL	None
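Because this disjunction references both inputs and contains no usable equality, the plan above has empty join keys (a shuffled cross product, hence the warning) and evaluates the whole ON clause as a residual filter. A rough sanity check, not part of the test: counting the cross-product pairs that pass the predicate gives the same 13 rows, since on this data every row on both sides finds at least one qualifying partner and no null-extended rows are produced:

  SELECT COUNT(*)
  FROM test1 CROSS JOIN test2
  WHERE test1.value = test2.value
     OR test1.key BETWEEN 100 AND 102
     OR test2.key BETWEEN 100 AND 102;
  -- expected to return 13 on the data inserted above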
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and left input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and left input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Outer Join 0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+100	1	Bob	103	2	Ema
+100	1	Bob	104	3	Fli
+100	1	Bob	105	NULL	None
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+101	2	Car	104	3	Fli
+101	2	Car	105	NULL	None
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and right input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and right input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Outer Join 0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+98	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+NULL	NULL	NULL	104	3	Fli
+NULL	NULL	NULL	105	NULL	None
+PREHOOK: query: -- Keys plus residual (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Keys plus residual (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Outer Join 0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	NULL	105	NULL	None
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+100	1	Bob	NULL	NULL	NULL
+99	2	Mat	102	2	Del
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+NULL	NULL	NULL	104	3	Fli
+Warning: Shuffle Join MERGEJOIN[26][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Reducer 3' is a cross product
+PREHOOK: query: -- Mixed ( FOJ (ROJ, LOJ) ) 
+EXPLAIN
+SELECT *
+FROM (
+  SELECT test1.key AS key1, test1.value AS value1, test1.col_1 AS col_1,
+         test2.key AS key2, test2.value AS value2, test2.col_2 AS col_2
+  FROM test1 RIGHT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1.key AS key3, test1.value AS value3, test1.col_1 AS col_3,
+         test2.key AS key4, test2.value AS value4, test2.col_2 AS col_4
+  FROM test1 LEFT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Mixed ( FOJ (ROJ, LOJ) ) 
+EXPLAIN
+SELECT *
+FROM (
+  SELECT test1.key AS key1, test1.value AS value1, test1.col_1 AS col_1,
+         test2.key AS key2, test2.value AS value2, test2.col_2 AS col_2
+  FROM test1 RIGHT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1.key AS key3, test1.value AS value3, test1.col_1 AS col_3,
+         test2.key AS key4, test2.value AS value4, test2.col_2 AS col_4
+  FROM test1 LEFT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE), Reducer 6 (SIMPLE_EDGE)
+        Reducer 6 <- Map 5 (SIMPLE_EDGE), Map 7 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 4 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 5 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 7 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Right Outer Join0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string), _col3 (type: int), _col4 (type: int), _col5 (type: string)
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Outer Join 0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
+                residual filter predicates: {(_col1 is null or (_col10 is null and (_col7 <> _col4)))}
+                Statistics: Num rows: 36 Data size: 768 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 36 Data size: 768 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+        Reducer 6 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string), _col3 (type: int), _col4 (type: int), _col5 (type: string)
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
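Side note on the outer ON clause of this query: AND binds tighter than OR, so the condition is evaluated as

  sq1.value1 IS NULL OR (sq2.value4 IS NULL AND sq2.value3 != sq1.value2)

which is exactly how the residual filter {(_col1 is null or (_col10 is null and (_col7 <> _col4)))} in Reducer 3 groups it.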
+Warning: Shuffle Join MERGEJOIN[26][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Reducer 3' is a cross product
+PREHOOK: query: SELECT *
+FROM (
+  SELECT test1.key AS key1, test1.value AS value1, test1.col_1 AS col_1,
+         test2.key AS key2, test2.value AS value2, test2.col_2 AS col_2
+  FROM test1 RIGHT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq1
+F

<TRUNCATED>

[2/7] hive git commit: HIVE-15251: Provide support for complex expressions in ON clauses for OUTER joins (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by jc...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/ql/src/test/results/clientpositive/mapjoin46.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/mapjoin46.q.out b/ql/src/test/results/clientpositive/mapjoin46.q.out
new file mode 100644
index 0000000..1f5ea21
--- /dev/null
+++ b/ql/src/test/results/clientpositive/mapjoin46.q.out
@@ -0,0 +1,2002 @@
+PREHOOK: query: CREATE TABLE test1 (key INT, value INT, col_1 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test1
+POSTHOOK: query: CREATE TABLE test1 (key INT, value INT, col_1 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test1
+PREHOOK: query: INSERT INTO test1 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'),
+    (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2, 'Car')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@test1
+POSTHOOK: query: INSERT INTO test1 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'),
+    (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2, 'Car')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@test1
+POSTHOOK: Lineage: test1.col_1 SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: test1.key EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: test1.value EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: CREATE TABLE test2 (key INT, value INT, col_2 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test2
+POSTHOOK: query: CREATE TABLE test2 (key INT, value INT, col_2 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test2
+PREHOOK: query: INSERT INTO test2 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
+    (104, 3, 'Fli'), (105, NULL, 'None')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__2
+PREHOOK: Output: default@test2
+POSTHOOK: query: INSERT INTO test2 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
+    (104, 3, 'Fli'), (105, NULL, 'None')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__2
+POSTHOOK: Output: default@test2
+POSTHOOK: Lineage: test2.col_2 SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: test2.key EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: test2.value EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: -- Basic outer join
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Basic outer join
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_1:test2 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_1:test2 
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	NULL	NULL	NULL
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
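This second file (mapjoin46.q.out) exercises similar queries compiled as map joins: the small side (test2) is loaded into a hash table in the local-work stage and the join runs map-side, with no residual filter needed for a plain equality. These plans presumably depend on map-join auto-conversion being enabled; the test's session setup is not shown here, but a sketch of checking the fallback plan for the same query would be:

  SET hive.auto.convert.join=false;
  EXPLAIN
  SELECT *
  FROM test1 LEFT OUTER JOIN test2
  ON (test1.value = test2.value);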
+PREHOOK: query: -- Conjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Conjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_1:test2 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_1:test2 
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: key BETWEEN 100 AND 102 (type: boolean)
+              Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: int), value (type: int), col_2 (type: string)
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                HashTable Sink Operator
+                  filter predicates:
+                    0 {_col0 BETWEEN 100 AND 102}
+                    1 
+                  keys:
+                    0 _col1 (type: int)
+                    1 _col1 (type: int)
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                filter predicates:
+                  0 {_col0 BETWEEN 100 AND 102}
+                  1 
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	NULL	NULL	NULL
+100	1	Bob	NULL	NULL	NULL
+101	2	Car	102	2	Del
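In the conjunction case above, the two single-side predicates are handled differently: the test2 condition is pushed below the join as a Filter Operator (test2 is not the preserved side of this LEFT OUTER JOIN), while the test1 condition becomes a "filter predicates" entry on input 0 so that non-qualifying test1 rows are still returned null-extended. A hypothetical contrast showing why the test1 predicate cannot simply be pushed down as well: pre-filtering the preserved side drops rows such as (99, 2, 'Mat') entirely instead of null-extending them:

  SELECT *
  FROM (SELECT * FROM test1 WHERE key BETWEEN 100 AND 102) t1
  LEFT OUTER JOIN test2
  ON (t1.value = test2.value AND test2.key BETWEEN 100 AND 102);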
+Warning: Map Join MAPJOIN[11][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: -- Conjunction with pred on single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Conjunction with pred on single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_1:test2 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_1:test2 
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: key BETWEEN 100 AND 102 (type: boolean)
+              Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: int), value (type: int), col_2 (type: string)
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                HashTable Sink Operator
+                  filter predicates:
+                    0 {_col0 BETWEEN 100 AND 102}
+                    1 
+                  keys:
+                    0 
+                    1 
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                filter predicates:
+                  0 {_col0 BETWEEN 100 AND 102}
+                  1 
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                Statistics: Num rows: 6 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 6 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[11][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	NULL	NULL	NULL
+100	1	Bob	102	2	Del
+101	2	Car	102	2	Del
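Reading the result above: only one test2 row passes the pushed-down filter, so the two qualifying test1 rows each pair with it once and the other four test1 rows come back null-extended (2 + 4 = 6 rows). The pushed filter on its own (illustrative only, not part of the test):

  SELECT * FROM test2 WHERE key BETWEEN 100 AND 102;
  -- leaves only (102, 2, 'Del') on the build side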
+PREHOOK: query: -- Conjunction with pred on multiple inputs and none (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Conjunction with pred on multiple inputs and none (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_0:test1 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_0:test1 
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Right Outer Join0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+99	2	Mat	102	2	Del
+101	2	Car	102	2	Del
+99	2	Mat	103	2	Ema
+101	2	Car	103	2	Ema
+NULL	NULL	NULL	104	3	Fli
+NULL	NULL	NULL	105	NULL	None
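The constant conjunct is folded away during planning, so no filter or residual predicates appear and the plan is effectively the same as for the bare equality condition:

  SELECT *
  FROM test1 RIGHT OUTER JOIN test2
  ON (test1.value = test2.value);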
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: -- Condition on one input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Condition on one input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_1:test2 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_1:test2 
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                filter predicates:
+                  0 {_col0 BETWEEN 100 AND 102}
+                  1 
+                keys:
+                  0 
+                  1 
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                filter predicates:
+                  0 {_col0 BETWEEN 100 AND 102}
+                  1 
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	NULL	NULL	NULL
+100	1	Bob	102	2	Del
+100	1	Bob	103	2	Ema
+100	1	Bob	104	3	Fli
+100	1	Bob	105	NULL	None
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+101	2	Car	104	3	Fli
+101	2	Car	105	NULL	None
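With no predicate connecting the two tables, this is again a cross-product map join, and the left-side range condition is kept as a preserved-row filter rather than a pre-filter. The cardinality of the result follows directly (illustrative arithmetic, not part of the test):

  qualifying test1 rows (keys 100, 101):  2 x 4 test2 rows = 8
  remaining test1 rows, null-extended:    4 x 1            = 4
  total                                                     = 12 rows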
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_1:test2 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_1:test2 
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 
+                  1 
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+98	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+100	1	Bob	103	2	Ema
+100	1	Bob	104	3	Fli
+100	1	Bob	105	NULL	None
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+101	2	Car	104	3	Fli
+101	2	Car	105	NULL	None
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and left input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and left input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_1:test2 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_1:test2 
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 
+                  1 
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+100	1	Bob	103	2	Ema
+100	1	Bob	104	3	Fli
+100	1	Bob	105	NULL	None
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+101	2	Car	104	3	Fli
+101	2	Car	105	NULL	None
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and right input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and right input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_1:test2 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_1:test2 
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 
+                  1 
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+98	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+PREHOOK: query: -- Keys plus residual (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Keys plus residual (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_1:test2 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_1:test2 
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	102	2	Del
+100	1	Bob	NULL	NULL	NULL
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_0:test1 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_0:test1 
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 
+                  1 
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Right Outer Join0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+98	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+99	2	Mat	102	2	Del
+100	1	Bob	102	2	Del
+101	2	Car	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	103	2	Ema
+101	2	Car	103	2	Ema
+100	1	Bob	104	3	Fli
+101	2	Car	104	3	Fli
+100	1	Bob	105	NULL	None
+101	2	Car	105	NULL	None
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and left input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and left input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_0:test1 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_0:test1 
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 
+                  1 
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Right Outer Join0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+99	2	Mat	102	2	Del
+100	1	Bob	102	2	Del
+101	2	Car	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	103	2	Ema
+101	2	Car	103	2	Ema
+100	1	Bob	104	3	Fli
+101	2	Car	104	3	Fli
+100	1	Bob	105	NULL	None
+101	2	Car	105	NULL	None
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and right input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and right input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_0:test1 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_0:test1 
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 
+                  1 
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Right Outer Join0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+98	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+99	2	Mat	102	2	Del
+100	1	Bob	102	2	Del
+101	2	Car	102	2	Del
+99	2	Mat	103	2	Ema
+101	2	Car	103	2	Ema
+NULL	NULL	NULL	104	3	Fli
+NULL	NULL	NULL	105	NULL	None
+PREHOOK: query: -- Keys plus residual (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Keys plus residual (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_0:test1 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_0:test1 
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Right Outer Join0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+99	2	Mat	102	2	Del
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+NULL	NULL	NULL	104	3	Fli
+NULL	NULL	NULL	105	NULL	None
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Outer Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+101	2	Car	105	NULL	None
+101	2	Car	104	3	Fli
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+100	1	Bob	105	NULL	None
+100	1	Bob	104	3	Fli
+100	1	Bob	103	2	Ema
+100	1	Bob	102	2	Del
+99	2	Mat	103	2	Ema
+99	2	Mat	102	2	Del
+99	0	Alice	102	2	Del
+98	NULL	None	102	2	Del
+NULL	NULL	None	102	2	Del
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and left input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and left input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Outer Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+101	2	Car	105	NULL	None
+101	2	Car	104	3	Fli
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+100	1	Bob	105	NULL	None
+100	1	Bob	104	3	Fli
+100	1	Bob	103	2	Ema
+100	1	Bob	102	2	Del
+99	2	Mat	103	2	Ema
+99	2	Mat	102	2	Del
+99	0	Alice	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+NULL	NULL	None	NULL	NULL	NULL
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and right input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and right input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Outer Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+100	1	Bob	102	2	Del
+99	2	Mat	103	2	Ema
+99	2	Mat	102	2	Del
+99	0	Alice	102	2	Del
+98	NULL	None	102	2	Del
+NULL	NULL	None	102	2	Del
+NULL	NULL	NULL	105	NULL	None
+NULL	NULL	NULL	104	3	Fli
+PREHOOK: query: -- Keys plus residual (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Keys plus residual (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: int)
+                sort order: +
+                Map-reduce partition columns: _col1 (type: int)
+                Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col2 (type: string)
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: int)
+                sort order: +
+                Map-reduce partition columns: _col1 (type: int)
+                Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                value expressions: _col0 (type: int), _col2 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Outer Join 0 to 1
+          keys:
+            0 _col1 (type: int)
+            1 _col1 (type: int)
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+          Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+98	NULL	None	NULL	NULL	NULL
+NULL	NULL	None	NULL	NULL	NULL
+NULL	NULL	NULL	105	NULL	None
+99	0	Alice	NULL	NULL	NULL
+100	1	Bob	NULL	NULL	NULL
+101	2	Car	103	2	Ema
+101	2	Car	102	2	Del
+99	2	Mat	102	2	Del
+NULL	NULL	NULL	104	3	Fli
+Warning: Shuffle Join JOIN[17][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Stage-2:MAPRED' is a cross product
+PREHOOK: query: -- Mixed ( FOJ (ROJ, LOJ) ) 
+EXPLAIN
+SELECT *
+FROM (
+  SELECT test1.key AS key1, test1.value AS value1, test1.col_1 AS col_1,
+         test2.key AS key2, test2.value AS value2, test2.col_2 AS col_2
+  FROM test1 RIGHT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1.key AS key3, test1.value AS value3, test1.col_1 AS col_3,
+         test2.key AS key4, test2.value AS value4, test2.col_2 AS col_4
+  FROM test1 LEFT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Mixed ( FOJ (ROJ, LOJ) ) 
+EXPLAIN
+SELECT *
+FROM (
+  SELECT test1.key AS key1, test1.value AS value1, test1.col_1 AS col_1,
+         test2.key AS key2, test2.value AS value2, test2.col_2 AS col_2
+  FROM test1 RIGHT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1.key AS key3, test1.value AS value3, test1.col_1 AS col_3,
+         test2.key AS key4, test2.value AS value4, test2.col_2 AS col_4
+  FROM test1 LEFT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-8 is a root stage
+  Stage-2 depends on stages: Stage-8
+  Stage-0 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-8
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_0:test1 
+          Fetch Operator
+            limit: -1
+        $hdt$_2:$hdt$_3:test2 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_0:test1 
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+        $hdt$_2:$hdt$_3:test2 
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test2
+            Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_2 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Right Outer Join0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string), _col3 (type: int), _col4 (type: int), _col5 (type: string)
+          TableScan
+            alias: test1
+            Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: int), value (type: int), col_1 (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Left Outer Join0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string), _col3 (type: int), _col4 (type: int), _col5 (type: string)
+      Local Work:
+        Map Reduce Local Work
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Outer Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
+          residual filter predicates: {(_col1 is null or (_col10 is null and (_col7 <> _col4)))}
+          Statistics: Num rows: 36 Data size: 768 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 36 Data size: 768 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[17][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Stage-2:MAPRED' is a cross product
+PREHOOK: query: SELECT *
+FROM (
+  SELECT test1.key AS key1, test1.value AS value1, test1.col_1 AS col_1,
+         test2.key AS key2, test2.value AS value2, test2.col_2 AS col_2
+  FROM test1 RIGHT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1.key AS key3, test1.value AS value3, test1.col_1 AS col_3,
+         test2.key AS key4, test2.value AS value4, test2.col_2 AS col_4
+  FROM test1 LEFT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM (
+  SELECT test1.key AS key1, test1.value AS value1, test1.col_1 AS col_1,
+         test2.key AS key2, test2.value AS value2, test2.col_2 AS col_2
+  FROM test1 RIGHT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1.key AS key3, test1.value AS value3, test1.col_1 AS col_3,
+         test2.key AS key4, test2.value AS value4, test2.col_2 AS col_4
+  FROM test1 LEFT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	NULL	105	NULL	None	101	2	Car	103	2	Ema
+NULL	NULL	NULL	105	NULL	None	101	2	Car	102	2	Del
+NULL	NULL	NULL	105	NULL	None	100	1	Bob	NULL	NULL	NULL
+NULL	NULL	NULL	105	NULL	None	99	2	Mat	102	2	Del
+NULL	NULL	NULL	105	NULL	None	99	0	Alice	NULL	NULL	NULL
+NULL	NULL	NULL	105	NULL	None	98	NULL	None	NULL	NULL	NULL
+NULL	NULL	NULL	105	NULL	None	NULL	NULL	None	NULL	NULL	NULL
+NULL	NULL	NULL	104	3	Fli	101	2	Car	103	2	Ema
+NULL	NULL	NULL	104	3	Fli	101	2	Car	102	2	Del
+NULL	NULL	NULL	104	3	Fli	100	1	Bob	NULL	NULL	NULL
+NULL	NULL	NULL	104	3	Fli	99	2	Mat	102	2	Del
+NULL	NULL	NULL	104	3	Fli	99	0	Alice	NULL	NULL	NULL
+NULL	NULL	NULL	104	3	Fli	98	NULL	None	NULL	NULL	NULL
+NULL	NULL	NULL	104	3	Fli	NULL	NULL	None	NULL	NULL	NULL
+101	2	Car	103	2	Ema	100	1	Bob	NULL	NULL	NULL
+101	2	Car	103	2	Ema	99	0	Alice	NULL	NULL	NULL
+101	2	Car	102	2	Del	100	1	Bob	NULL	NULL	NULL
+101	2	Car	102	2	Del	99	0	Alice	NULL	NULL	NULL
+99	2	Mat	102	2	Del	100	1	Bob	NULL	NULL	NULL
+99	2	Mat	102	2	Del	99	0	Alice	NULL	NULL	NULL
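
For readers skimming the golden files, a minimal self-contained HiveQL sketch of the pattern these tests exercise is below. The table definitions, sample rows, and the disjunctive ON clause are taken verbatim from the test setup in this commit (the plain test1/test2 tables of mapjoin46.q.out further down); nothing here is new behavior beyond what the plans above already show, namely that the OR condition is carried as a "residual filter predicates" entry on the join operator (with a cross-product warning) rather than as a join key.

CREATE TABLE test1 (key INT, value INT, col_1 STRING);
CREATE TABLE test2 (key INT, value INT, col_2 STRING);
INSERT INTO test1 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'),
    (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2, 'Car');
INSERT INTO test2 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
    (104, 3, 'Fli'), (105, NULL, 'None');

-- Disjunction mixing an equality with a single-input predicate:
-- supported for OUTER joins by HIVE-15251 and evaluated as a
-- residual filter predicate after the (cross-product) join.
EXPLAIN
SELECT *
FROM test1 LEFT OUTER JOIN test2
ON (test1.value = test2.value
  OR test1.key BETWEEN 100 AND 102);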


[4/7] hive git commit: HIVE-15251: Provide support for complex expressions in ON clauses for OUTER joins (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by jc...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/ql/src/test/results/clientpositive/llap/mapjoin46.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/mapjoin46.q.out b/ql/src/test/results/clientpositive/llap/mapjoin46.q.out
new file mode 100644
index 0000000..656fcc4
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/mapjoin46.q.out
@@ -0,0 +1,2101 @@
+PREHOOK: query: CREATE TABLE test1 (key INT, value INT, col_1 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test1
+POSTHOOK: query: CREATE TABLE test1 (key INT, value INT, col_1 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test1
+PREHOOK: query: INSERT INTO test1 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'),
+    (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2, 'Car')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@test1
+POSTHOOK: query: INSERT INTO test1 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'),
+    (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2, 'Car')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@test1
+POSTHOOK: Lineage: test1.col_1 SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: test1.key EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: test1.value EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: CREATE TABLE test2 (key INT, value INT, col_2 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test2
+POSTHOOK: query: CREATE TABLE test2 (key INT, value INT, col_2 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test2
+PREHOOK: query: INSERT INTO test2 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
+    (104, 3, 'Fli'), (105, NULL, 'None')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__2
+PREHOOK: Output: default@test2
+POSTHOOK: query: INSERT INTO test2 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
+    (104, 3, 'Fli'), (105, NULL, 'None')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__2
+POSTHOOK: Output: default@test2
+POSTHOOK: Lineage: test2.col_2 SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
+POSTHOOK: Lineage: test2.key EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: test2.value EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: -- Basic outer join
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Basic outer join
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 2 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Left Outer Join0 to 1
+                      keys:
+                        0 _col1 (type: int)
+                        1 _col1 (type: int)
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        1 Map 2
+                      Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	NULL	NULL	NULL
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+PREHOOK: query: -- Conjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Conjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 2 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Left Outer Join0 to 1
+                      filter predicates:
+                        0 {_col0 BETWEEN 100 AND 102}
+                        1 
+                      keys:
+                        0 _col1 (type: int)
+                        1 _col1 (type: int)
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        1 Map 2
+                      Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: key BETWEEN 100 AND 102 (type: boolean)
+                    Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: int), value (type: int), col_2 (type: string)
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col1 (type: int)
+                        sort order: +
+                        Map-reduce partition columns: _col1 (type: int)
+                        Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	NULL	NULL	NULL
+100	1	Bob	NULL	NULL	NULL
+101	2	Car	102	2	Del
+Warning: Map Join MAPJOIN[11][bigTable=?] in task 'Map 1' is a cross product
+PREHOOK: query: -- Conjunction with pred on single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Conjunction with pred on single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 2 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Left Outer Join0 to 1
+                      filter predicates:
+                        0 {_col0 BETWEEN 100 AND 102}
+                        1 
+                      keys:
+                        0 
+                        1 
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        1 Map 2
+                      Statistics: Num rows: 6 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 6 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: key BETWEEN 100 AND 102 (type: boolean)
+                    Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: int), value (type: int), col_2 (type: string)
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[11][bigTable=?] in task 'Map 1' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	NULL	NULL	NULL
+100	1	Bob	102	2	Del
+101	2	Car	102	2	Del
+PREHOOK: query: -- Conjunction with pred on multiple inputs and none (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Conjunction with pred on multiple inputs and none (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 2 <- Map 1 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Right Outer Join0 to 1
+                      keys:
+                        0 _col1 (type: int)
+                        1 _col1 (type: int)
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        0 Map 1
+                      Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+99	2	Mat	102	2	Del
+101	2	Car	102	2	Del
+99	2	Mat	103	2	Ema
+101	2	Car	103	2	Ema
+NULL	NULL	NULL	104	3	Fli
+NULL	NULL	NULL	105	NULL	None
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
+PREHOOK: query: -- Condition on one input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Condition on one input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 2 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Left Outer Join0 to 1
+                      filter predicates:
+                        0 {_col0 BETWEEN 100 AND 102}
+                        1 
+                      keys:
+                        0 
+                        1 
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        1 Map 2
+                      Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	NULL	NULL	NULL
+100	1	Bob	102	2	Del
+100	1	Bob	105	NULL	None
+100	1	Bob	104	3	Fli
+100	1	Bob	103	2	Ema
+101	2	Car	102	2	Del
+101	2	Car	105	NULL	None
+101	2	Car	104	3	Fli
+101	2	Car	103	2	Ema
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 2 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Left Outer Join0 to 1
+                      keys:
+                        0 
+                        1 
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        1 Map 2
+                      residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                      Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+98	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+100	1	Bob	105	NULL	None
+100	1	Bob	104	3	Fli
+100	1	Bob	103	2	Ema
+101	2	Car	102	2	Del
+101	2	Car	105	NULL	None
+101	2	Car	104	3	Fli
+101	2	Car	103	2	Ema
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and left input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and left input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 2 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Left Outer Join0 to 1
+                      keys:
+                        0 
+                        1 
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        1 Map 2
+                      residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
+                      Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+100	1	Bob	105	NULL	None
+100	1	Bob	104	3	Fli
+100	1	Bob	103	2	Ema
+101	2	Car	102	2	Del
+101	2	Car	105	NULL	None
+101	2	Car	104	3	Fli
+101	2	Car	103	2	Ema
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and right input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and right input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 2 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Left Outer Join0 to 1
+                      keys:
+                        0 
+                        1 
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        1 Map 2
+                      residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+                      Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+98	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+PREHOOK: query: -- Keys plus residual (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Keys plus residual (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 2 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Left Outer Join0 to 1
+                      keys:
+                        0 _col1 (type: int)
+                        1 _col1 (type: int)
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        1 Map 2
+                      residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                      Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	102	2	Del
+100	1	Bob	NULL	NULL	NULL
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 2 <- Map 1 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Right Outer Join0 to 1
+                      keys:
+                        0 
+                        1 
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        0 Map 1
+                      residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                      Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+101	2	Car	102	2	Del
+100	1	Bob	102	2	Del
+99	2	Mat	102	2	Del
+99	0	Alice	102	2	Del
+98	NULL	None	102	2	Del
+101	2	Car	103	2	Ema
+100	1	Bob	103	2	Ema
+99	2	Mat	103	2	Ema
+101	2	Car	104	3	Fli
+100	1	Bob	104	3	Fli
+101	2	Car	105	NULL	None
+100	1	Bob	105	NULL	None
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and left input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and left input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 2 <- Map 1 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Right Outer Join0 to 1
+                      keys:
+                        0 
+                        1 
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        0 Map 1
+                      residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
+                      Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+101	2	Car	102	2	Del
+100	1	Bob	102	2	Del
+99	2	Mat	102	2	Del
+101	2	Car	103	2	Ema
+100	1	Bob	103	2	Ema
+99	2	Mat	103	2	Ema
+101	2	Car	104	3	Fli
+100	1	Bob	104	3	Fli
+101	2	Car	105	NULL	None
+100	1	Bob	105	NULL	None
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and right input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and right input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 2 <- Map 1 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Right Outer Join0 to 1
+                      keys:
+                        0 
+                        1 
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        0 Map 1
+                      residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+                      Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+101	2	Car	102	2	Del
+100	1	Bob	102	2	Del
+99	2	Mat	102	2	Del
+99	0	Alice	102	2	Del
+98	NULL	None	102	2	Del
+101	2	Car	103	2	Ema
+99	2	Mat	103	2	Ema
+NULL	NULL	NULL	104	3	Fli
+NULL	NULL	NULL	105	NULL	None
+PREHOOK: query: -- Keys plus residual (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Keys plus residual (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 2 <- Map 1 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Right Outer Join0 to 1
+                      keys:
+                        0 _col1 (type: int)
+                        1 _col1 (type: int)
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        0 Map 1
+                      residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                      Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+99	2	Mat	102	2	Del
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+NULL	NULL	NULL	104	3	Fli
+NULL	NULL	NULL	105	NULL	None
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and single inputs (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Outer Join 0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+98	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+100	1	Bob	103	2	Ema
+100	1	Bob	104	3	Fli
+100	1	Bob	105	NULL	None
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+101	2	Car	104	3	Fli
+101	2	Car	105	NULL	None
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and left input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and left input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Outer Join 0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+100	1	Bob	103	2	Ema
+100	1	Bob	104	3	Fli
+100	1	Bob	105	NULL	None
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+101	2	Car	104	3	Fli
+101	2	Car	105	NULL	None
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- Disjunction with pred on multiple inputs and right input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Disjunction with pred on multiple inputs and right input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Outer Join 0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	None	102	2	Del
+98	NULL	None	102	2	Del
+99	0	Alice	102	2	Del
+99	2	Mat	102	2	Del
+99	2	Mat	103	2	Ema
+100	1	Bob	102	2	Del
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+NULL	NULL	NULL	104	3	Fli
+NULL	NULL	NULL	105	NULL	None
+PREHOOK: query: -- Keys plus residual (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Keys plus residual (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Outer Join 0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+NULL	NULL	NULL	105	NULL	None
+NULL	NULL	None	NULL	NULL	NULL
+98	NULL	None	NULL	NULL	NULL
+99	0	Alice	NULL	NULL	NULL
+100	1	Bob	NULL	NULL	NULL
+99	2	Mat	102	2	Del
+101	2	Car	102	2	Del
+101	2	Car	103	2	Ema
+NULL	NULL	NULL	104	3	Fli
+Warning: Shuffle Join MERGEJOIN[26][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Reducer 3' is a cross product
+PREHOOK: query: -- Mixed ( FOJ (ROJ, LOJ) ) 
+EXPLAIN
+SELECT *
+FROM (
+  SELECT test1.key AS key1, test1.value AS value1, test1.col_1 AS col_1,
+         test2.key AS key2, test2.value AS value2, test2.col_2 AS col_2
+  FROM test1 RIGHT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1.key AS key3, test1.value AS value3, test1.col_1 AS col_3,
+         test2.key AS key4, test2.value AS value4, test2.col_2 AS col_4
+  FROM test1 LEFT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2)
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Mixed ( FOJ (ROJ, LOJ) ) 
+EXPLAIN
+SELECT *
+FROM (
+  SELECT test1.key AS key1, test1.value AS value1, test1.col_1 AS col_1,
+         test2.key AS key2, test2.value AS value2, test2.col_2 AS col_2
+  FROM test1 RIGHT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1.key AS key3, test1.value AS value3, test1.col_1 AS col_3,
+         test2.key AS key4, test2.value AS value4, test2.col_2 AS col_4
+  FROM test1 LEFT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 2 <- Map 1 (BROADCAST_EDGE)
+        Map 4 <- Map 5 (BROADCAST_EDGE)
+        Reducer 3 <- Map 2 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Right Outer Join0 to 1
+                      keys:
+                        0 _col1 (type: int)
+                        1 _col1 (type: int)
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        0 Map 1
+                      residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                      Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string), _col3 (type: int), _col4 (type: int), _col5 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 4 
+            Map Operator Tree:
+                TableScan
+                  alias: test1
+                  Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Left Outer Join0 to 1
+                      keys:
+                        0 _col1 (type: int)
+                        1 _col1 (type: int)
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        1 Map 5
+                      residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                      Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string), _col3 (type: int), _col4 (type: int), _col5 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 5 
+            Map Operator Tree:
+                TableScan
+                  alias: test2
+                  Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Outer Join 0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
+                residual filter predicates: {(_col1 is null or (_col10 is null and (_col7 <> _col4)))}
+                Statistics: Num rows: 36 Data size: 768 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 36 Data size: 768 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[26][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Reducer 3' is a cross product
+PREHOOK: query: SELECT *
+FROM (
+  SELECT test1.key AS key1, test1.value AS value1, test1.col_1 AS col_1,
+         test2.key AS key2, test2.value AS value2, test2.col_2 AS col_2
+  FROM test1 RIGHT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1.key AS key3, test1.value AS value3, test1.col_1 AS col_3,
+         test2.key AS key4, test2.value AS value4, test2.col_2 AS col_4
+  FROM test1 LEFT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM (
+  SELECT test1.key AS key1, test1.value AS value1, test1.col_1 AS col_1,
+         test2.key AS key2, test2.value AS value2, test2.col_2 AS col_2
+  FROM test1 RIGHT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1.key AS key3, test1.value AS value3, test1.col_1 AS col_3,
+         test2.key AS key4, test2.value AS value4, test2.col_2 AS col_4
+  FROM test1 LEFT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+99	2	Mat	102	2	Del	99	0	Alice	NULL	NULL	NULL
+99	2	Mat	102	2	Del	100	1	Bob	NULL	NULL	NULL
+101	2	Car	102	2	Del	99	0	Alice	NULL	NULL	NULL
+101	2	Car	102	2	Del	100	1	Bob	NULL	NULL	NULL
+101	2	Car	103	2	Ema	99	0	Alice	NULL	NULL	NULL
+101	2	Car	103	2	Ema	100	1	Bob	NULL	NULL	NULL
+NULL	NULL	NULL	104	3	Fli	NULL	NULL	None	NULL	NULL	NULL
+NULL	NULL	NULL	104	3	Fli	98	NULL	None	NULL	NULL	NULL
+NULL	NULL	NULL	104	3	Fli	99	0	Alice	NULL	NULL	NULL
+NULL	NULL	NULL	104	3	Fli	99	2	Mat	102	2	Del
+NULL	NULL	NULL	104	3	Fli	100	1	Bob	NULL	NULL	NULL
+NULL	NULL	NULL	104	3	Fli	101	2	Car	102	2	Del
+NULL	NULL	NULL	104	3	Fli	101	2	Car	103	2	Ema
+NULL	NULL	NULL	105	NULL	None	NULL	NULL	None	NULL	NULL	NULL
+NULL	NULL	NULL	105	NULL	None	98	NULL	None	NULL	NULL	NULL
+NULL	NULL	NULL	105	NULL	None	99	0	Alice	NULL	NULL	NULL
+NULL	NULL	NULL	105	NULL	None	99	2	Mat	102	2	Del
+NULL	NULL	NULL	105	NULL	None	100	1	Bob	NULL	NULL	NULL
+NULL	NULL	NULL	105	NULL	None	101	2	Car	102	2	Del
+NULL	NULL	NULL	105	NULL	None	101	2	Car	103	2	Ema


[7/7] hive git commit: HIVE-15251: Provide support for complex expressions in ON clauses for OUTER joins (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

Posted by jc...@apache.org.
HIVE-15251: Provide support for complex expressions in ON clauses for OUTER joins (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/9b2badc3
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/9b2badc3
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/9b2badc3

Branch: refs/heads/master
Commit: 9b2badc3e1c8fdb84a78bcbf503f19c219b4b5c0
Parents: 7089ac7
Author: Jesus Camacho Rodriguez <jc...@apache.org>
Authored: Fri Dec 2 17:15:53 2016 +0000
Committer: Jesus Camacho Rodriguez <jc...@apache.org>
Committed: Tue Dec 6 00:26:19 2016 +0000

----------------------------------------------------------------------
 .../test/resources/testconfiguration.properties |    5 +-
 .../org/apache/hadoop/hive/ql/ErrorMsg.java     |    2 +-
 .../hadoop/hive/ql/exec/CommonJoinOperator.java |  295 ++-
 .../apache/hadoop/hive/ql/exec/JoinUtil.java    |   17 +
 .../hive/ql/optimizer/ConvertJoinMapJoin.java   |    2 +
 .../hive/ql/optimizer/MapJoinProcessor.java     |    1 +
 .../hive/ql/optimizer/physical/Vectorizer.java  |    4 +
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |   32 +-
 .../apache/hadoop/hive/ql/plan/JoinDesc.java    |   33 +
 ql/src/test/queries/clientnegative/join45.q     |   13 -
 ql/src/test/queries/clientpositive/join46.q     |  263 +++
 ql/src/test/queries/clientpositive/mapjoin46.q  |  264 +++
 .../queries/clientpositive/smb_mapjoin_46.q     |  240 ++
 .../queries/clientpositive/vectorized_join46.q  |  225 ++
 ql/src/test/results/clientnegative/join45.q.out |   13 -
 ql/src/test/results/clientpositive/join46.q.out | 1948 ++++++++++++++++
 .../results/clientpositive/llap/join46.q.out    | 2190 ++++++++++++++++++
 .../results/clientpositive/llap/mapjoin46.q.out | 2101 +++++++++++++++++
 .../clientpositive/llap/vectorized_join46.q.out | 1867 +++++++++++++++
 .../test/results/clientpositive/mapjoin46.q.out | 2002 ++++++++++++++++
 .../results/clientpositive/smb_mapjoin_46.q.out | 1695 ++++++++++++++
 21 files changed, 13111 insertions(+), 101 deletions(-)
----------------------------------------------------------------------
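Before the file-by-file diff, it may help to see what the new behaviour amounts to. With this patch an outer join may carry an arbitrary predicate over both inputs in its ON clause: any equality keys still drive the join itself, while the remaining condition is attached to the join operator as the "residual filter predicates" entry visible in the EXPLAIN plans above, is evaluated on every combined row, and rows from the preserved side(s) that never pass it are emitted padded with NULLs. The following stand-alone Java sketch is illustrative only (it is not Hive code; class, method and variable names are invented); under those assumptions it reproduces, up to the string columns and row order, the FULL OUTER JOIN result shown near the top of this message.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.BiPredicate;

// Minimal sketch (NOT Hive code) of a FULL OUTER JOIN whose ON clause is an
// arbitrary predicate over both inputs: matches are found by evaluating the
// predicate on each combined row; unmatched rows are emitted NULL-padded.
public class ComplexOnClauseOuterJoinSketch {

  static class Row {
    final Integer key;
    final Integer value;
    Row(Integer key, Integer value) { this.key = key; this.value = value; }
  }

  static List<Object[]> fullOuterJoin(List<Row> left, List<Row> right,
      BiPredicate<Row, Row> on) {
    List<Object[]> out = new ArrayList<>();
    boolean[] rightMatched = new boolean[right.size()];
    for (Row l : left) {
      boolean matched = false;
      for (int i = 0; i < right.size(); i++) {
        Row r = right.get(i);
        if (on.test(l, r)) {                  // the "residual" ON predicate
          out.add(new Object[] { l.key, l.value, r.key, r.value });
          matched = true;
          rightMatched[i] = true;
        }
      }
      if (!matched) {                         // preserved left row: NULL-pad the right side
        out.add(new Object[] { l.key, l.value, null, null });
      }
    }
    for (int i = 0; i < right.size(); i++) {
      if (!rightMatched[i]) {                 // preserved right row: NULL-pad the left side
        out.add(new Object[] { null, null, right.get(i).key, right.get(i).value });
      }
    }
    return out;
  }

  public static void main(String[] args) {
    // Same (key, value) data as the test1/test2 tables used by the new q files.
    List<Row> test1 = Arrays.asList(new Row(null, null), new Row(98, null),
        new Row(99, 0), new Row(99, 2), new Row(100, 1), new Row(101, 2));
    List<Row> test2 = Arrays.asList(new Row(102, 2), new Row(103, 2),
        new Row(104, 3), new Row(105, null));
    // ON (test1.value = test2.value OR test1.key BETWEEN 100 AND 102), with SQL
    // NULL semantics approximated by treating comparisons against NULL as false.
    BiPredicate<Row, Row> on = (l, r) ->
        (l.value != null && r.value != null && l.value.equals(r.value))
            || (l.key != null && l.key >= 100 && l.key <= 102);
    for (Object[] row : fullOuterJoin(test1, test2, on)) {
      System.out.println(Arrays.toString(row));
    }
  }
}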


http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index 772e123..5db98f1 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -181,6 +181,7 @@ minillaplocal.shared.query.files=alter_merge_2_orc.q,\
   join0.q,\
   join1.q,\
   join_emit_interval.q,\
+  join46.q,\
   join_nullsafe.q,\
   leftsemijoin.q,\
   limit_pushdown.q,\
@@ -191,6 +192,7 @@ minillaplocal.shared.query.files=alter_merge_2_orc.q,\
   mapjoin2.q,\
   mapjoin_decimal.q,\
   mapjoin_mapjoin.q,\
+  mapjoin46.q,\
   merge1.q,\
   merge2.q,\
   mergejoin.q,\
@@ -690,7 +692,8 @@ minillaplocal.query.files=acid_globallimit.q,\
   database.q,\
   smb_mapjoin_17.q,\
   groupby_resolution.q,\
-  windowing_windowspec2.q
+  windowing_windowspec2.q,\
+  vectorized_join46.q
 
 encrypted.query.files=encryption_join_unencrypted_tbl.q,\
   encryption_insert_partition_static.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
index b62df35..a315057 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -459,7 +459,7 @@ public enum ErrorMsg {
     "requires \"AND <boolean>\" on the 1st WHEN MATCHED clause of <{0}>", true),
   MERGE_TOO_MANY_DELETE(10405, "MERGE statment can have at most 1 WHEN MATCHED ... DELETE clause: <{0}>", true),
   MERGE_TOO_MANY_UPDATE(10406, "MERGE statment can have at most 1 WHEN MATCHED ... UPDATE clause: <{0}>", true),
-  INVALID_JOIN_CONDITION(10407, "Complex condition not supported for (LEFT|RIGHT|FULL) OUTER JOIN"),
+  INVALID_JOIN_CONDITION(10407, "Error parsing condition in outer join"),
   //========================== 20000 range starts here ========================//
   SCRIPT_INIT_ERROR(20000, "Unable to initialize custom script."),
   SCRIPT_IO_ERROR(20001, "An error occurred while reading or writing to your custom script. "

http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java
index 5512ee2..940f2dd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.exec;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -31,6 +32,7 @@ import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.exec.persistence.AbstractRowContainer;
 import org.apache.hadoop.hive.ql.exec.persistence.RowContainer;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.JoinCondDesc;
 import org.apache.hadoop.hive.ql.plan.JoinDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
@@ -63,6 +65,16 @@ public abstract class CommonJoinOperator<T extends JoinDesc> extends
    */
   protected transient List<ExprNodeEvaluator>[] joinFilters;
 
+  /**
+   * List of evaluators for conditions that appear in the ON clause and need to be
+   * evaluated before emitting rows. Currently relevant only for outer joins.
+   *
+   * For instance, given the query:
+   *     select * from t1 right outer join t2 on t1.c1 + t2.c2 > t1.c3;
+   * The expression evaluator for t1.c1 + t2.c2 > t1.c3 will be stored in this list.
+   */
+  protected transient List<ExprNodeEvaluator> residualJoinFilters;
+
   protected transient int[][] filterMaps;
 
   /**
@@ -74,6 +86,24 @@ public abstract class CommonJoinOperator<T extends JoinDesc> extends
    * The ObjectInspectors for join filters.
    */
   protected transient List<ObjectInspector>[] joinFilterObjectInspectors;
+
+  /**
+   * OIs corresponding to residualJoinFilters.
+   */
+  protected transient List<ObjectInspector> residualJoinFiltersOIs;
+
+  /**
+   * True when there are residual join filters that need to be evaluated before emitting rows.
+   */
+  protected transient boolean needsPostEvaluation;
+
+  /**
+   * Keeps track of rows for which the residual filters evaluated to false. We iterate
+   * over this container afterwards and emit those rows padded with NULL values if they
+   * have not produced a result yet. The key is the index of the row within its relation.
+   */
+  protected transient Map<Integer, Object[]> rowContainerPostFilteredOuterJoin = null;
+
   /**
    * The standard ObjectInspectors for the join inputs.
    */
@@ -164,6 +194,9 @@ public abstract class CommonJoinOperator<T extends JoinDesc> extends
     this.statsMap = clone.statsMap;
     this.joinFilters = clone.joinFilters;
     this.joinFilterObjectInspectors = clone.joinFilterObjectInspectors;
+    this.residualJoinFilters = clone.residualJoinFilters;
+    this.residualJoinFiltersOIs = clone.residualJoinFiltersOIs;
+    this.needsPostEvaluation = clone.needsPostEvaluation;
   }
 
   private <T extends JoinDesc> ObjectInspector getJoinOutputObjectInspector(
@@ -247,7 +280,7 @@ public abstract class CommonJoinOperator<T extends JoinDesc> extends
         rowContainerObjectInspectors[alias] = rcOIs;
       }
       rowContainerStandardObjectInspectors =
-        JoinUtil.getStandardObjectInspectors(rowContainerObjectInspectors,NOTSKIPBIGTABLE, tagLen);
+        JoinUtil.getStandardObjectInspectors(rowContainerObjectInspectors, NOTSKIPBIGTABLE, tagLen);
     }
 
     dummyObj = new ArrayList[numAliases];
@@ -323,6 +356,30 @@ public abstract class CommonJoinOperator<T extends JoinDesc> extends
       }
     }
 
+    // Create post-filtering evaluators if needed
+    if (conf.getResidualFilterExprs() != null) {
+      // Currently residual filter expressions are only used with outer joins, thus
+      // we add this safeguard.
+      // TODO: Remove this guard once residual expressions are also supported for
+      // inner joins. That would improve efficiency in the evaluation of certain
+      // joins, since we would not emit rows that are immediately thrown away by
+      // a filter.
+      assert !noOuterJoin;
+      residualJoinFilters = new ArrayList<>(conf.getResidualFilterExprs().size());
+      residualJoinFiltersOIs = new ArrayList<>(conf.getResidualFilterExprs().size());
+      for (int i = 0; i < conf.getResidualFilterExprs().size(); i++) {
+        ExprNodeDesc expr = conf.getResidualFilterExprs().get(i);
+        residualJoinFilters.add(ExprNodeEvaluatorFactory.get(expr));
+        residualJoinFiltersOIs.add(
+                residualJoinFilters.get(i).initialize(outputObjInspector));
+      }
+      needsPostEvaluation = true;
+      // We need to disable the join emit interval, since for outer joins with post
+      // conditions we need a full view of the matching right-side rows to know
+      // whether we must produce a row with NULL values or not
+      joinEmitInterval = -1;
+    }
+
     if (isLogInfoEnabled) {
       LOG.info("JOIN " + outputObjInspector.getTypeName() + " totalsz = " + totalSz);
     }
@@ -426,7 +483,8 @@ public abstract class CommonJoinOperator<T extends JoinDesc> extends
   }
 
   // fill forwardCache with skipvector
-  private void createForwardJoinObject(boolean[] skip) throws HiveException {
+  // returns whether a record was forwarded
+  private boolean createForwardJoinObject(boolean[] skip) throws HiveException {
     Arrays.fill(forwardCache, null);
 
     boolean forward = false;
@@ -439,13 +497,30 @@ public abstract class CommonJoinOperator<T extends JoinDesc> extends
       }
     }
     if (forward) {
-      internalForward(forwardCache, outputObjInspector);
-      countAfterReport = 0;
+      if (needsPostEvaluation) {
+        forward = !JoinUtil.isFiltered(forwardCache, residualJoinFilters, residualJoinFiltersOIs);
+      }
+      if (forward) {
+        // If it is not an outer join, or the post-condition filters
+        // are empty or the row passed them
+        internalForward(forwardCache, outputObjInspector);
+        countAfterReport = 0;
+      }
     }
+
+    return forward;
   }
 
   // entry point (aliasNum = 0)
   private void genJoinObject() throws HiveException {
+    if (needsPostEvaluation && 0 == numAliases - 2) {
+      int nextType = condn[0].getType();
+      if (nextType == JoinDesc.RIGHT_OUTER_JOIN || nextType == JoinDesc.FULL_OUTER_JOIN) {
+        // Initialize container to use for storing tuples before emitting them
+        rowContainerPostFilteredOuterJoin = new HashMap<>();
+      }
+    }
+
     boolean rightFirst = true;
     boolean hasFilter = hasFilter(order[0]);
     AbstractRowContainer.RowIterator<List<Object>> iter = storage[order[0]].rowIter();
@@ -460,78 +535,173 @@ public abstract class CommonJoinOperator<T extends JoinDesc> extends
       genObject(1, rightFirst, rightNull);
       rightFirst = false;
     }
+
+    // Consolidation for outer joins
+    if (needsPostEvaluation && 0 == numAliases - 2) {
+      int nextType = condn[0].getType();
+      if (nextType == JoinDesc.RIGHT_OUTER_JOIN || nextType == JoinDesc.FULL_OUTER_JOIN) {
+        // If it is a RIGHT / FULL OUTER JOIN, we need to iterate through the row container
+        // that contains all the right records that did not produce results. Then, for each
+        // of those records, we replace the left side with NULL values, and produce the
+        // records.
+        // Observe that we only enter this block when we have finished iterating through
+        // all the left and right records (aliasNum == numAliases - 2), and thus, we have
+        // tried to evaluate the post-filter condition on every possible combination.
+        // NOTE: the left records that do not produce results (for LEFT / FULL OUTER JOIN)
+        // will always be caught in the genObject method
+        Arrays.fill(forwardCache, null);
+        for (Object[] row : rowContainerPostFilteredOuterJoin.values()) {
+          if (row == null) {
+            continue;
+          }
+          System.arraycopy(row, 0, forwardCache, offsets[numAliases - 1], row.length);
+          internalForward(forwardCache, outputObjInspector);
+          countAfterReport = 0;
+        }
+      }
+    }
   }
 
   // creates objects in recursive manner
   private void genObject(int aliasNum, boolean allLeftFirst, boolean allLeftNull)
       throws HiveException {
-    if (aliasNum < numAliases) {
+    JoinCondDesc joinCond = condn[aliasNum - 1];
+    int type = joinCond.getType();
+    int left = joinCond.getLeft();
+    int right = joinCond.getRight();
+
+    if (needsPostEvaluation && aliasNum == numAliases - 2) {
+      int nextType = condn[aliasNum].getType();
+      if (nextType == JoinDesc.RIGHT_OUTER_JOIN || nextType == JoinDesc.FULL_OUTER_JOIN) {
+        // Initialize container to use for storing tuples before emitting them
+        rowContainerPostFilteredOuterJoin = new HashMap<>();
+      }
+    }
 
-      boolean[] skip = skipVectors[aliasNum];
-      boolean[] prevSkip = skipVectors[aliasNum - 1];
+    boolean[] skip = skipVectors[aliasNum];
+    boolean[] prevSkip = skipVectors[aliasNum - 1];
 
-      JoinCondDesc joinCond = condn[aliasNum - 1];
-      int type = joinCond.getType();
-      int left = joinCond.getLeft();
-      int right = joinCond.getRight();
+    // search for match in the rhs table
+    AbstractRowContainer<List<Object>> aliasRes = storage[order[aliasNum]];
 
-      // search for match in the rhs table
-      AbstractRowContainer<List<Object>> aliasRes = storage[order[aliasNum]];
+    boolean needToProduceLeftRow = false;
+    boolean producedRow = false;
+    boolean done = false;
+    boolean loopAgain = false;
+    boolean tryLOForFO = type == JoinDesc.FULL_OUTER_JOIN;
 
-      boolean done = false;
-      boolean loopAgain = false;
-      boolean tryLOForFO = type == JoinDesc.FULL_OUTER_JOIN;
+    boolean rightFirst = true;
+    AbstractRowContainer.RowIterator<List<Object>> iter = aliasRes.rowIter();
+    int pos = 0;
+    for (List<Object> rightObj = iter.first(); !done && rightObj != null;
+         rightObj = loopAgain ? rightObj : iter.next(), rightFirst = loopAgain = false, pos++) {
+      System.arraycopy(prevSkip, 0, skip, 0, prevSkip.length);
+
+      boolean rightNull = rightObj == dummyObj[aliasNum];
+      if (hasFilter(order[aliasNum])) {
+        filterTags[aliasNum] = getFilterTag(rightObj);
+      }
+      skip[right] = rightNull;
+
+      if (type == JoinDesc.INNER_JOIN) {
+        innerJoin(skip, left, right);
+      } else if (type == JoinDesc.LEFT_SEMI_JOIN) {
+        if (innerJoin(skip, left, right)) {
+          // if left-semi-join found a match, skipping the rest of the rows in the
+          // rhs table of the semijoin
+          done = true;
+        }
+      } else if (type == JoinDesc.LEFT_OUTER_JOIN ||
+          (type == JoinDesc.FULL_OUTER_JOIN && rightNull)) {
+        int result = leftOuterJoin(skip, left, right);
+        if (result < 0) {
+          continue;
+        }
+        done = result > 0;
+      } else if (type == JoinDesc.RIGHT_OUTER_JOIN ||
+          (type == JoinDesc.FULL_OUTER_JOIN && allLeftNull)) {
+        if (allLeftFirst && !rightOuterJoin(skip, left, right) ||
+          !allLeftFirst && !innerJoin(skip, left, right)) {
+          continue;
+        }
+      } else if (type == JoinDesc.FULL_OUTER_JOIN) {
+        if (tryLOForFO && leftOuterJoin(skip, left, right) > 0) {
+          loopAgain = allLeftFirst;
+          done = !loopAgain;
+          tryLOForFO = false;
+        } else if (allLeftFirst && !rightOuterJoin(skip, left, right) ||
+          !allLeftFirst && !innerJoin(skip, left, right)) {
+          continue;
+        }
+      }
+      intermediate[aliasNum] = rightObj;
 
-      boolean rightFirst = true;
-      AbstractRowContainer.RowIterator<List<Object>> iter = aliasRes.rowIter();
-      for (List<Object> rightObj = iter.first(); !done && rightObj != null;
-           rightObj = loopAgain ? rightObj : iter.next(), rightFirst = loopAgain = false) {
-        System.arraycopy(prevSkip, 0, skip, 0, prevSkip.length);
+      if (aliasNum == numAliases - 1) {
+        if (!(allLeftNull && rightNull)) {
+          needToProduceLeftRow = true;
+          if (needsPostEvaluation) {
+            // This is only executed for outer joins with residual filters
+            boolean forward = createForwardJoinObject(skipVectors[numAliases - 1]);
+            producedRow |= forward;
+            if (!rightNull &&
+                    (type == JoinDesc.RIGHT_OUTER_JOIN || type == JoinDesc.FULL_OUTER_JOIN)) {
+              if (forward) {
+                // This record produced a result this time, remove it from the storage
+                // as it will not need to produce a result with NULL values anymore
+                rowContainerPostFilteredOuterJoin.put(pos, null);
+              } else {
+                // We need to store this record (if it is not stored yet) in case
+                // we need to produce a NULL-padded result for it later
+                if (!rowContainerPostFilteredOuterJoin.containsKey(pos)) {
+                  Object[] row = Arrays.copyOfRange(forwardCache, offsets[aliasNum], offsets[aliasNum + 1]);
+                  rowContainerPostFilteredOuterJoin.put(pos, row);
+                }
+              }
+            }
+          } else {
+            createForwardJoinObject(skipVectors[numAliases - 1]);
+          }
+        }
+      } else {
+        // recursively call the join the other rhs tables
+        genObject(aliasNum + 1, allLeftFirst && rightFirst, allLeftNull && rightNull);
+      }
+    }
 
-        boolean rightNull = rightObj == dummyObj[aliasNum];
-        if (hasFilter(order[aliasNum])) {
-          filterTags[aliasNum] = getFilterTag(rightObj);
+    // Consolidation for outer joins
+    if (needsPostEvaluation && aliasNum == numAliases - 1 &&
+            needToProduceLeftRow && !producedRow && !allLeftNull) {
+      if (type == JoinDesc.LEFT_OUTER_JOIN || type == JoinDesc.FULL_OUTER_JOIN) {
+        // If it is a LEFT / FULL OUTER JOIN and the left record did not produce
+        // results, we need to take that record, replace the right side with NULL
+        // values, and produce the record
+        int i = numAliases - 1;
+        for (int j = offsets[i]; j < offsets[i + 1]; j++) {
+          forwardCache[j] = null;
         }
-        skip[right] = rightNull;
-
-        if (type == JoinDesc.INNER_JOIN) {
-          innerJoin(skip, left, right);
-        } else if (type == JoinDesc.LEFT_SEMI_JOIN) {
-          if (innerJoin(skip, left, right)) {
-            // if left-semi-join found a match, skipping the rest of the rows in the
-            // rhs table of the semijoin
-            done = true;
-          }
-        } else if (type == JoinDesc.LEFT_OUTER_JOIN ||
-            (type == JoinDesc.FULL_OUTER_JOIN && rightNull)) {
-          int result = leftOuterJoin(skip, left, right);
-          if (result < 0) {
-            continue;
-          }
-          done = result > 0;
-        } else if (type == JoinDesc.RIGHT_OUTER_JOIN ||
-            (type == JoinDesc.FULL_OUTER_JOIN && allLeftNull)) {
-          if (allLeftFirst && !rightOuterJoin(skip, left, right) ||
-            !allLeftFirst && !innerJoin(skip, left, right)) {
-            continue;
-          }
-        } else if (type == JoinDesc.FULL_OUTER_JOIN) {
-          if (tryLOForFO && leftOuterJoin(skip, left, right) > 0) {
-            loopAgain = allLeftFirst;
-            done = !loopAgain;
-            tryLOForFO = false;
-          } else if (allLeftFirst && !rightOuterJoin(skip, left, right) ||
-            !allLeftFirst && !innerJoin(skip, left, right)) {
+        internalForward(forwardCache, outputObjInspector);
+        countAfterReport = 0;
+      }
+    } else if (needsPostEvaluation && aliasNum == numAliases - 2) {
+      int nextType = condn[aliasNum].getType();
+      if (nextType == JoinDesc.RIGHT_OUTER_JOIN || nextType == JoinDesc.FULL_OUTER_JOIN) {
+        // If it is a RIGHT / FULL OUTER JOIN, we need to iterate through the row container
+        // that contains all the right records that did not produce results. Then, for each
+        // of those records, we replace the left side with NULL values, and produce the
+        // records.
+        // Observe that we only enter this block when we have finished iterating through
+        // all the left and right records (aliasNum == numAliases - 2), and thus, we have
+        // tried to evaluate the post-filter condition on every possible combination.
+        Arrays.fill(forwardCache, null);
+        for (Object[] row : rowContainerPostFilteredOuterJoin.values()) {
+          if (row == null) {
             continue;
           }
+          System.arraycopy(row, 0, forwardCache, offsets[numAliases - 1], row.length);
+          internalForward(forwardCache, outputObjInspector);
+          countAfterReport = 0;
         }
-        intermediate[aliasNum] = rightObj;
-
-        // recursively call the join the other rhs tables
-        genObject(aliasNum + 1, allLeftFirst && rightFirst, allLeftNull && rightNull);
       }
-    } else if (!allLeftNull) {
-      createForwardJoinObject(skipVectors[numAliases - 1]);
     }
   }
 
@@ -676,7 +846,6 @@ public abstract class CommonJoinOperator<T extends JoinDesc> extends
         forwardCache[p++] = obj.get(j);
       }
     }
-
     internalForward(forwardCache, outputObjInspector);
     countAfterReport = 0;
   }
@@ -754,9 +923,9 @@ public abstract class CommonJoinOperator<T extends JoinDesc> extends
         }
       }
 
-      if (!hasEmpty && !mayHasMoreThanOne) {
+      if (!needsPostEvaluation && !hasEmpty && !mayHasMoreThanOne) {
         genAllOneUniqueJoinObject();
-      } else if (!hasEmpty && !hasLeftSemiJoin) {
+      } else if (!needsPostEvaluation && !hasEmpty && !hasLeftSemiJoin) {
         genUniqueJoinObject(0, 0);
       } else {
         genJoinObject();
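The new post-evaluation path above is easier to follow in isolation: for RIGHT/FULL OUTER joins, every right-side row starts out as pending, keyed by its position; whenever a combined row passes the residual predicate the pending entry is cleared, and whatever is still pending once all combinations have been tried is emitted padded with NULLs. This is also why the join emit interval has to be disabled when residual filters are present: whether a row needs NULL padding cannot be decided until all of its candidate matches have been seen. A minimal sketch of that bookkeeping, assuming made-up rows and a made-up residual predicate (this is not the operator code):

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.BiPredicate;

// Sketch of the post-evaluation bookkeeping: pending right rows are keyed by
// position and cleared once they contribute a result; leftovers are emitted
// NULL-padded at the end.
public class PostEvaluationSketch {
  public static void main(String[] args) {
    List<int[]> left = Arrays.asList(new int[]{99, 2}, new int[]{100, 1});
    List<int[]> right = Arrays.asList(new int[]{102, 2}, new int[]{104, 3});
    // Hypothetical residual predicate over the combined row: values must match.
    BiPredicate<int[], int[]> residual = (l, r) -> l[1] == r[1];

    Map<Integer, int[]> pendingRight = new HashMap<>();
    for (int pos = 0; pos < right.size(); pos++) {
      pendingRight.put(pos, right.get(pos));    // everything is pending at first
    }
    for (int[] l : left) {
      for (int pos = 0; pos < right.size(); pos++) {
        int[] r = right.get(pos);
        if (residual.test(l, r)) {
          System.out.println(Arrays.toString(l) + " " + Arrays.toString(r));
          pendingRight.remove(pos);             // produced output; no NULL padding needed
        }
      }
    }
    for (int[] r : pendingRight.values()) {     // right rows that never passed the filter
      System.out.println("[null, null] " + Arrays.toString(r));
    }
  }
}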

http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java
index 6cbcab6..9718c48 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java
@@ -233,6 +233,23 @@ public class JoinUtil {
   /**
    * Returns true if the row does not pass through filters.
    */
+  protected static boolean isFiltered(Object row, List<ExprNodeEvaluator> filters,
+          List<ObjectInspector> filtersOIs) throws HiveException {
+    for (int i = 0; i < filters.size(); i++) {
+      ExprNodeEvaluator evaluator = filters.get(i);
+      Object condition = evaluator.evaluate(row);
+      Boolean result = (Boolean) ((PrimitiveObjectInspector) filtersOIs.get(i)).
+              getPrimitiveJavaObject(condition);
+      if (result == null || !result) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  /**
+   * Returns true if the row does not pass through filters.
+   */
   protected static short isFiltered(Object row, List<ExprNodeEvaluator> filters,
       List<ObjectInspector> ois, int[] filterMap) throws HiveException {
     // apply join filters on the row.
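The new boolean isFiltered overload above is an all-must-pass check over the residual evaluators: the row is considered filtered out as soon as one predicate evaluates to null or false. A stand-alone analogue using java.util.function.Predicate in place of Hive's ExprNodeEvaluator (the predicates shown are invented examples):

import java.util.Arrays;
import java.util.List;
import java.util.function.Predicate;

// Stand-alone analogue of the boolean isFiltered() overload: the row is dropped
// unless every residual predicate evaluates to a definite true.
public class IsFilteredSketch {
  static boolean isFiltered(Object[] row, List<Predicate<Object[]>> filters) {
    for (Predicate<Object[]> f : filters) {
      if (!f.test(row)) {
        return true;   // one failing predicate is enough to filter the row out
      }
    }
    return false;
  }

  public static void main(String[] args) {
    List<Predicate<Object[]>> filters = Arrays.asList(
        r -> r[0] != null,                              // e.g. _col0 is not null
        r -> r[1] != null && ((Integer) r[1]) > 100);   // e.g. _col1 > 100
    System.out.println(isFiltered(new Object[]{1, 101}, filters));  // false: row survives
    System.out.println(isFiltered(new Object[]{1, null}, filters)); // true: row is dropped
  }
}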

http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java
index 7441f1e..beed6b8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java
@@ -99,6 +99,7 @@ public class ConvertJoinMapJoin implements NodeProcessor {
         return retval;
       } else {
         fallbackToReduceSideJoin(joinOp, context);
+        return null;
       }
     }
 
@@ -232,6 +233,7 @@ public class ConvertJoinMapJoin implements NodeProcessor {
                   joinDesc.getFilters(), joinDesc.getNoOuterJoin(), null);
       mapJoinDesc.setNullSafes(joinDesc.getNullSafes());
       mapJoinDesc.setFilterMap(joinDesc.getFilterMap());
+      mapJoinDesc.setResidualFilterExprs(joinDesc.getResidualFilterExprs());
       mapJoinDesc.resetOrder();
     }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
index c6efd5b..b2893e7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
@@ -1192,6 +1192,7 @@ public class MapJoinProcessor extends Transform {
     mapJoinDescriptor.setTagOrder(tagOrder);
     mapJoinDescriptor.setNullSafes(desc.getNullSafes());
     mapJoinDescriptor.setFilterMap(desc.getFilterMap());
+    mapJoinDescriptor.setResidualFilterExprs(desc.getResidualFilterExprs());
     if (!valueIndices.isEmpty()) {
       mapJoinDescriptor.setValueIndices(valueIndices);
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
index 37baaf6..108c4e6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
@@ -1561,6 +1561,10 @@ public class Vectorizer implements PhysicalPlanResolver {
       LOG.info("Cannot vectorize map work small table expression");
       return false;
     }
+    if (desc.getResidualFilterExprs() != null && !desc.getResidualFilterExprs().isEmpty()) {
+      LOG.info("Cannot vectorize outer join with complex ON clause");
+      return false;
+    }
     return true;
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 42a7ab9..2d1118c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -7888,6 +7888,16 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
         join.getNoOuterJoin(), joinCondns, filterMap, joinKeys);
     desc.setReversedExprs(reversedExprs);
     desc.setFilterMap(join.getFilterMap());
+    // For outer joins, add filters that apply to more than one input
+    if (!join.getNoOuterJoin() && join.getPostJoinFilters().size() != 0) {
+      List<ExprNodeDesc> residualFilterExprs = new ArrayList<ExprNodeDesc>();
+      for (ASTNode cond : join.getPostJoinFilters()) {
+        residualFilterExprs.add(genExprNodeDesc(cond, outputRR));
+      }
+      desc.setResidualFilterExprs(residualFilterExprs);
+      // Clean post-conditions
+      join.getPostJoinFilters().clear();
+    }
 
     JoinOperator joinOp = (JoinOperator) OperatorFactory.getAndMakeChild(getOpContext(), desc,
         new RowSchema(outputRR.getColumnInfos()), rightOps);
@@ -8102,18 +8112,20 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     joinOp.getConf().setQBJoinTreeProps(joinTree);
     joinContext.put(joinOp, joinTree);
 
-    // Safety check for postconditions; currently we do not support them for outer join
-    if (joinTree.getPostJoinFilters().size() != 0 && !joinTree.getNoOuterJoin()) {
-      throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION.getMsg());
-    }
-    Operator op = joinOp;
-    for(ASTNode condn : joinTree.getPostJoinFilters()) {
-      op = genFilterPlan(qb, condn, op, false);
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("Generated " + op + " with post-filtering conditions after JOIN operator");
+    if (joinTree.getPostJoinFilters().size() != 0) {
+      // Safety check for postconditions
+      assert joinTree.getNoOuterJoin();
+      Operator op = joinOp;
+      for(ASTNode condn : joinTree.getPostJoinFilters()) {
+        op = genFilterPlan(qb, condn, op, false);
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("Generated " + op + " with post-filtering conditions after JOIN operator");
+        }
       }
+      return op;
     }
-    return op;
+
+    return joinOp;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
index 2ca6b8f..3cd611c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
@@ -59,6 +59,8 @@ public class JoinDesc extends AbstractOperatorDesc {
   // alias to filter mapping
   private Map<Byte, List<ExprNodeDesc>> filters;
 
+  private List<ExprNodeDesc> residualFilterExprs;
+
   // pos of outer join alias=<pos of other alias:num of filters on outer join alias>xn
   // for example,
   // a left outer join b on a.k=b.k AND a.k>5 full outer join c on a.k=c.k AND a.k>10 AND c.k>20
@@ -193,6 +195,7 @@ public class JoinDesc extends AbstractOperatorDesc {
     this.tagOrder = clone.tagOrder;
     this.filters = clone.filters;
     this.filterMap = clone.filterMap;
+    this.residualFilterExprs = clone.residualFilterExprs;
     this.statistics = clone.statistics;
   }
 
@@ -294,6 +297,36 @@ public class JoinDesc extends AbstractOperatorDesc {
     this.filters = filters;
   }
 
+  @Explain(displayName = "residual filter predicates")
+  public String getResidualFilterExprsString() {
+    if (getResidualFilterExprs() == null || getResidualFilterExprs().size() == 0) {
+      return null;
+    }
+
+    StringBuilder sb = new StringBuilder();
+    boolean first = true;
+    for (ExprNodeDesc expr : getResidualFilterExprs()) {
+      if (!first) {
+        sb.append(" ");
+      }
+
+      first = false;
+      sb.append("{");
+      sb.append(expr.getExprString());
+      sb.append("}");
+    }
+
+    return sb.toString();
+  }
+
+  public List<ExprNodeDesc> getResidualFilterExprs() {
+    return residualFilterExprs;
+  }
+
+  public void setResidualFilterExprs(List<ExprNodeDesc> residualFilterExprs) {
+    this.residualFilterExprs = residualFilterExprs;
+  }
+
   @Explain(displayName = "outputColumnNames")
   public List<String> getOutputColumnNames() {
     return outputColumnNames;
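getResidualFilterExprsString() above is what produces the "residual filter predicates: {...}" annotations in the EXPLAIN plans quoted earlier in this message. A quick stand-alone rendering of the same format, using a plain string in place of ExprNodeDesc.getExprString() (the predicate text is copied from one of the plans above):

import java.util.Arrays;
import java.util.List;

// Reproduces the "{expr} {expr}" formatting used for the EXPLAIN annotation,
// with plain strings standing in for ExprNodeDesc.getExprString().
public class ResidualExplainStringSketch {
  public static void main(String[] args) {
    List<String> exprs = Arrays.asList(
        "((_col1 = _col4) or _col0 BETWEEN 100 AND 102)");
    StringBuilder sb = new StringBuilder();
    boolean first = true;
    for (String expr : exprs) {
      if (!first) {
        sb.append(" ");
      }
      first = false;
      sb.append("{").append(expr).append("}");
    }
    // Prints: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
    System.out.println(sb.toString());
  }
}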

http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/ql/src/test/queries/clientnegative/join45.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/join45.q b/ql/src/test/queries/clientnegative/join45.q
deleted file mode 100644
index 4e8db96..0000000
--- a/ql/src/test/queries/clientnegative/join45.q
+++ /dev/null
@@ -1,13 +0,0 @@
-set hive.strict.checks.cartesian.product=false;
-
--- SORT_QUERY_RESULTS
-
-CREATE TABLE mytable(val1 INT, val2 INT, val3 INT);
-
--- Outer join with complex pred: not supported
-EXPLAIN
-SELECT *
-FROM mytable src1 LEFT OUTER JOIN mytable src2
-ON (src1.val1+src2.val1>= 2450816
-  AND src1.val1+src2.val1<= 2451500);
-

http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/ql/src/test/queries/clientpositive/join46.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/join46.q b/ql/src/test/queries/clientpositive/join46.q
new file mode 100644
index 0000000..e9e9850
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/join46.q
@@ -0,0 +1,263 @@
+set hive.strict.checks.cartesian.product=false;
+set hive.join.emit.interval=2;
+
+CREATE TABLE test1 (key INT, value INT, col_1 STRING);
+INSERT INTO test1 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'),
+    (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2, 'Car');
+
+CREATE TABLE test2 (key INT, value INT, col_2 STRING);
+INSERT INTO test2 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
+    (104, 3, 'Fli'), (105, NULL, 'None');
+
+
+-- Basic outer join
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value);
+
+-- Conjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102);
+
+-- Conjunction with pred on single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102);
+
+-- Conjunction with pred on multiple inputs and none (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true);
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true);
+
+-- Condition on one input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and left input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and right input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+-- Keys plus residual (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+-- Disjunction with pred on multiple inputs and single inputs (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and left input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and right input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+-- Keys plus residual (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+-- Disjunction with pred on multiple inputs and single inputs (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and left input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and right input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+-- Keys plus residual (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+-- Mixed ( FOJ (ROJ, LOJ) ) 
+EXPLAIN
+SELECT *
+FROM (
+  SELECT test1.key AS key1, test1.value AS value1, test1.col_1 AS col_1,
+         test2.key AS key2, test2.value AS value2, test2.col_2 AS col_2
+  FROM test1 RIGHT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1.key AS key3, test1.value AS value3, test1.col_1 AS col_3,
+         test2.key AS key4, test2.value AS value4, test2.col_2 AS col_4
+  FROM test1 LEFT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2);
+
+SELECT *
+FROM (
+  SELECT test1.key AS key1, test1.value AS value1, test1.col_1 AS col_1,
+         test2.key AS key2, test2.value AS value2, test2.col_2 AS col_2
+  FROM test1 RIGHT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1.key AS key3, test1.value AS value3, test1.col_1 AS col_3,
+         test2.key AS key4, test2.value AS value4, test2.col_2 AS col_4
+  FROM test1 LEFT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2);
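For reference, a minimal sketch of the outer-join semantics the join46.q queries above exercise, using the test1/test2 rows inserted at the top of the file (the sketch itself is illustrative only and not part of the patch): an ON clause that is a pure disjunction must still preserve every row of the outer side.

SELECT t1.key, t1.value, t2.key, t2.value
FROM test1 t1 LEFT OUTER JOIN test2 t2
ON (t1.value = t2.value OR t1.key BETWEEN 100 AND 102);
-- (98, NULL) satisfies neither disjunct for any test2 row, so it is emitted once,
-- padded with NULLs on the test2 side; (100, 1) satisfies the BETWEEN disjunct,
-- so it pairs with all four test2 rows.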

http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/ql/src/test/queries/clientpositive/mapjoin46.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/mapjoin46.q b/ql/src/test/queries/clientpositive/mapjoin46.q
new file mode 100644
index 0000000..348dd67
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/mapjoin46.q
@@ -0,0 +1,264 @@
+set hive.auto.convert.join=true;
+set hive.strict.checks.cartesian.product=false;
+set hive.join.emit.interval=2;
+
+CREATE TABLE test1 (key INT, value INT, col_1 STRING);
+INSERT INTO test1 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'),
+    (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2, 'Car');
+
+CREATE TABLE test2 (key INT, value INT, col_2 STRING);
+INSERT INTO test2 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
+    (104, 3, 'Fli'), (105, NULL, 'None');
+
+
+-- Basic outer join
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value);
+
+-- Conjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102);
+
+-- Conjunction with pred on single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102);
+
+-- Conjunction with pred on multiple inputs and none (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true);
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true);
+
+-- Condition on one input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and left input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and right input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+-- Keys plus residual (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+-- Disjunction with pred on multiple inputs and single inputs (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and left input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and right input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+-- Keys plus residual (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+-- Disjunction with pred on multiple inputs and single inputs (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and left input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and right input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+-- Keys plus residual (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+-- Mixed ( FOJ (ROJ, LOJ) ) 
+EXPLAIN
+SELECT *
+FROM (
+  SELECT test1.key AS key1, test1.value AS value1, test1.col_1 AS col_1,
+         test2.key AS key2, test2.value AS value2, test2.col_2 AS col_2
+  FROM test1 RIGHT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1.key AS key3, test1.value AS value3, test1.col_1 AS col_3,
+         test2.key AS key4, test2.value AS value4, test2.col_2 AS col_4
+  FROM test1 LEFT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2);
+
+SELECT *
+FROM (
+  SELECT test1.key AS key1, test1.value AS value1, test1.col_1 AS col_1,
+         test2.key AS key2, test2.value AS value2, test2.col_2 AS col_2
+  FROM test1 RIGHT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1.key AS key3, test1.value AS value3, test1.col_1 AS col_3,
+         test2.key AS key4, test2.value AS value4, test2.col_2 AS col_4
+  FROM test1 LEFT OUTER JOIN test2
+  ON (test1.value=test2.value
+    AND (test1.key between 100 and 102
+      OR test2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2);
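For the "keys plus residual" cases in the files above, a minimal sketch of the expected behaviour (illustrative only, not part of the patch): the equality on value acts as the join key, the parenthesised disjunction is evaluated as a residual filter on each key match, and unmatched rows from the preserved side are still emitted.

SELECT t1.col_1, t2.col_2
FROM test1 t1 LEFT OUTER JOIN test2 t2
ON (t1.value = t2.value
  AND (t1.key BETWEEN 100 AND 102 OR t2.key BETWEEN 100 AND 102));
-- (99, 2, 'Mat') joins (102, 2, 'Del') because test2.key = 102 passes the residual,
-- but not (103, 2, 'Ema'); rows with no qualifying match, such as (99, 0, 'Alice'),
-- are emitted once with NULLs on the test2 side.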

http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/ql/src/test/queries/clientpositive/smb_mapjoin_46.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/smb_mapjoin_46.q b/ql/src/test/queries/clientpositive/smb_mapjoin_46.q
new file mode 100644
index 0000000..ad29414
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/smb_mapjoin_46.q
@@ -0,0 +1,240 @@
+set hive.strict.checks.cartesian.product=false;
+set hive.auto.convert.join=true;
+set hive.auto.convert.sortmerge.join=true;
+set hive.optimize.bucketmapjoin = true;
+set hive.optimize.bucketmapjoin.sortedmerge = true;
+set hive.input.format = org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
+set hive.join.emit.interval=2;
+set hive.exec.reducers.max = 1;
+set hive.merge.mapfiles=false;
+set hive.merge.mapredfiles=false; 
+
+CREATE TABLE aux1 (key INT, value INT, col_1 STRING);
+INSERT INTO aux1 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'),
+    (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2, 'Car');
+
+CREATE TABLE test1 (key INT, value INT, col_1 STRING) CLUSTERED BY (value) SORTED BY (value) INTO 2 BUCKETS;
+INSERT OVERWRITE TABLE test1
+SELECT * FROM aux1;
+
+CREATE TABLE aux2 (key INT, value INT, col_2 STRING);
+INSERT INTO aux2 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
+    (104, 3, 'Fli'), (105, NULL, 'None');
+
+CREATE TABLE test2 (key INT, value INT, col_2 STRING) CLUSTERED BY (value) SORTED BY (value) INTO 2 BUCKETS;
+INSERT OVERWRITE TABLE test2
+SELECT * FROM aux2;
+
+
+-- Basic outer join
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value);
+
+-- Conjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102);
+
+-- Conjunction with pred on single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102);
+
+-- Conjunction with pred on multiple inputs and none (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true);
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true);
+
+-- Condition on one input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and left input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and right input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+-- Keys plus residual (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+-- Disjunction with pred on multiple inputs and single inputs (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and left input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and right input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+-- Keys plus residual (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+-- Disjunction with pred on multiple inputs and single inputs (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and left input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and right input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+-- Keys plus residual (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
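The ON clauses above that reference only a single input, or that are pure disjunctions, carry no equi-join key, so the join is effectively a filtered cross product over the two small tables, which is presumably why each of these test files relaxes hive.strict.checks.cartesian.product. A rough illustration against the same data (not part of the patch):

SELECT COUNT(*)
FROM test1 LEFT OUTER JOIN test2
ON (test1.key BETWEEN 100 AND 102);
-- The two test1 rows with key 100 and 101 pair with all four test2 rows (8 rows),
-- and the remaining four test1 rows are NULL-padded (4 rows), giving 12 in total.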

http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/ql/src/test/queries/clientpositive/vectorized_join46.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/vectorized_join46.q b/ql/src/test/queries/clientpositive/vectorized_join46.q
new file mode 100644
index 0000000..a0eb1b2
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/vectorized_join46.q
@@ -0,0 +1,225 @@
+set hive.vectorized.execution.enabled=true;
+set hive.auto.convert.join=true;
+set hive.strict.checks.cartesian.product=false;
+set hive.join.emit.interval=2;
+
+CREATE TABLE test1 (key INT, value INT, col_1 STRING);
+INSERT INTO test1 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'),
+    (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2, 'Car');
+
+CREATE TABLE test2 (key INT, value INT, col_2 STRING);
+INSERT INTO test2 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
+    (104, 3, 'Fli'), (105, NULL, 'None');
+
+
+-- Basic outer join
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value);
+
+-- Conjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102);
+
+-- Conjunction with pred on single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102);
+
+-- Conjunction with pred on multiple inputs and none (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true);
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true);
+
+-- Condition on one input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and left input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and right input (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+-- Keys plus residual (left outer join)
+EXPLAIN
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+-- Disjunction with pred on multiple inputs and single inputs (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and left input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and right input (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+-- Keys plus residual (right outer join)
+EXPLAIN
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+-- Disjunction with pred on multiple inputs and single inputs (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and left input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and right input (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+-- Keys plus residual (full outer join)
+EXPLAIN
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));

http://git-wip-us.apache.org/repos/asf/hive/blob/9b2badc3/ql/src/test/results/clientnegative/join45.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/join45.q.out b/ql/src/test/results/clientnegative/join45.q.out
deleted file mode 100644
index 87ef769..0000000
--- a/ql/src/test/results/clientnegative/join45.q.out
+++ /dev/null
@@ -1,13 +0,0 @@
-PREHOOK: query: -- SORT_QUERY_RESULTS
-
-CREATE TABLE mytable(val1 INT, val2 INT, val3 INT)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@mytable
-POSTHOOK: query: -- SORT_QUERY_RESULTS
-
-CREATE TABLE mytable(val1 INT, val2 INT, val3 INT)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@mytable
-FAILED: SemanticException [Error 10407]: Complex condition not supported for (LEFT|RIGHT|FULL) OUTER JOIN