Posted to commits@hive.apache.org by om...@apache.org on 2015/11/18 23:41:12 UTC

[30/34] hive git commit: HIVE-12434: Merge branch 'spark' to master

http://git-wip-us.apache.org/repos/asf/hive/blob/0a88760f/ql/src/test/results/clientpositive/spark/orc_merge_incompat2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge_incompat2.q.out b/ql/src/test/results/clientpositive/spark/orc_merge_incompat2.q.out
new file mode 100644
index 0000000..90a8f59
--- /dev/null
+++ b/ql/src/test/results/clientpositive/spark/orc_merge_incompat2.q.out
@@ -0,0 +1,370 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+-- orc merge file tests for dynamic partition case
+
+create table orc_merge5 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orc_merge5
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+-- orc merge file tests for dynamic partition case
+
+create table orc_merge5 (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orc_merge5
+PREHOOK: query: create table orc_merge5a (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (st double) stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orc_merge5a
+POSTHOOK: query: create table orc_merge5a (userid bigint, string1 string, subtype double, decimal1 decimal, ts timestamp) partitioned by (st double) stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orc_merge5a
+PREHOOK: query: load data local inpath '../../data/files/orc_split_elim.orc' into table orc_merge5
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@orc_merge5
+POSTHOOK: query: load data local inpath '../../data/files/orc_split_elim.orc' into table orc_merge5
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@orc_merge5
+PREHOOK: query: explain insert overwrite table orc_merge5a partition (st) select userid,string1,subtype,decimal1,ts,subtype from orc_merge5
+PREHOOK: type: QUERY
+POSTHOOK: query: explain insert overwrite table orc_merge5a partition (st) select userid,string1,subtype,decimal1,ts,subtype from orc_merge5
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: orc_merge5
+                  Statistics: Num rows: 919 Data size: 246402 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp), subtype (type: double)
+                    outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                    Statistics: Num rows: 919 Data size: 246402 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 919 Data size: 246402 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
+                          serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
+                          name: default.orc_merge5a
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            st 
+          replace: true
+          table:
+              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
+              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
+              name: default.orc_merge5a
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+
+PREHOOK: query: insert overwrite table orc_merge5a partition (st) select userid,string1,subtype,decimal1,ts,subtype from orc_merge5 order by userid
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_merge5
+PREHOOK: Output: default@orc_merge5a
+POSTHOOK: query: insert overwrite table orc_merge5a partition (st) select userid,string1,subtype,decimal1,ts,subtype from orc_merge5 order by userid
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_merge5
+POSTHOOK: Output: default@orc_merge5a@st=0.8
+POSTHOOK: Output: default@orc_merge5a@st=1.8
+POSTHOOK: Output: default@orc_merge5a@st=8.0
+POSTHOOK: Output: default@orc_merge5a@st=80.0
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
+PREHOOK: query: insert into table orc_merge5a partition (st) select userid,string1,subtype,decimal1,ts,subtype from orc_merge5 order by userid
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_merge5
+PREHOOK: Output: default@orc_merge5a
+POSTHOOK: query: insert into table orc_merge5a partition (st) select userid,string1,subtype,decimal1,ts,subtype from orc_merge5 order by userid
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_merge5
+POSTHOOK: Output: default@orc_merge5a@st=0.8
+POSTHOOK: Output: default@orc_merge5a@st=1.8
+POSTHOOK: Output: default@orc_merge5a@st=8.0
+POSTHOOK: Output: default@orc_merge5a@st=80.0
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
+PREHOOK: query: insert into table orc_merge5a partition (st) select userid,string1,subtype,decimal1,ts,subtype from orc_merge5 order by userid
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_merge5
+PREHOOK: Output: default@orc_merge5a
+POSTHOOK: query: insert into table orc_merge5a partition (st) select userid,string1,subtype,decimal1,ts,subtype from orc_merge5 order by userid
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_merge5
+POSTHOOK: Output: default@orc_merge5a@st=0.8
+POSTHOOK: Output: default@orc_merge5a@st=1.8
+POSTHOOK: Output: default@orc_merge5a@st=8.0
+POSTHOOK: Output: default@orc_merge5a@st=80.0
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
+PREHOOK: query: insert into table orc_merge5a partition (st) select userid,string1,subtype,decimal1,ts,subtype from orc_merge5 order by userid
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_merge5
+PREHOOK: Output: default@orc_merge5a
+POSTHOOK: query: insert into table orc_merge5a partition (st) select userid,string1,subtype,decimal1,ts,subtype from orc_merge5 order by userid
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_merge5
+POSTHOOK: Output: default@orc_merge5a@st=0.8
+POSTHOOK: Output: default@orc_merge5a@st=1.8
+POSTHOOK: Output: default@orc_merge5a@st=8.0
+POSTHOOK: Output: default@orc_merge5a@st=80.0
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=0.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=1.8).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=8.0).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).decimal1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:decimal1, type:decimal(10,0), comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).string1 SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:string1, type:string, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).subtype SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:subtype, type:double, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).ts SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:ts, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: orc_merge5a PARTITION(st=80.0).userid SIMPLE [(orc_merge5)orc_merge5.FieldSchema(name:userid, type:bigint, comment:null), ]
+PREHOOK: query: analyze table orc_merge5a partition(st=80.0) compute statistics noscan
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_merge5a
+PREHOOK: Output: default@orc_merge5a
+PREHOOK: Output: default@orc_merge5a@st=80.0
+POSTHOOK: query: analyze table orc_merge5a partition(st=80.0) compute statistics noscan
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_merge5a
+POSTHOOK: Output: default@orc_merge5a
+POSTHOOK: Output: default@orc_merge5a@st=80.0
+PREHOOK: query: analyze table orc_merge5a partition(st=0.8) compute statistics noscan
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_merge5a
+PREHOOK: Output: default@orc_merge5a
+PREHOOK: Output: default@orc_merge5a@st=0.8
+POSTHOOK: query: analyze table orc_merge5a partition(st=0.8) compute statistics noscan
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_merge5a
+POSTHOOK: Output: default@orc_merge5a
+POSTHOOK: Output: default@orc_merge5a@st=0.8
+Found 4 items
+#### A masked pattern was here ####
+Found 4 items
+#### A masked pattern was here ####
+PREHOOK: query: show partitions orc_merge5a
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@orc_merge5a
+POSTHOOK: query: show partitions orc_merge5a
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@orc_merge5a
+st=0.8
+st=1.8
+st=8.0
+st=80.0
+PREHOOK: query: select * from orc_merge5a where userid<=13
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_merge5a
+PREHOOK: Input: default@orc_merge5a@st=0.8
+PREHOOK: Input: default@orc_merge5a@st=1.8
+PREHOOK: Input: default@orc_merge5a@st=8.0
+PREHOOK: Input: default@orc_merge5a@st=80.0
+#### A masked pattern was here ####
+POSTHOOK: query: select * from orc_merge5a where userid<=13
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_merge5a
+POSTHOOK: Input: default@orc_merge5a@st=0.8
+POSTHOOK: Input: default@orc_merge5a@st=1.8
+POSTHOOK: Input: default@orc_merge5a@st=8.0
+POSTHOOK: Input: default@orc_merge5a@st=80.0
+#### A masked pattern was here ####
+13	bar	80.0	2	1969-12-31 16:00:05	80.0
+13	bar	80.0	2	1969-12-31 16:00:05	80.0
+13	bar	80.0	2	1969-12-31 16:00:05	80.0
+13	bar	80.0	2	1969-12-31 16:00:05	80.0
+2	foo	0.8	1	1969-12-31 16:00:00	0.8
+2	foo	0.8	1	1969-12-31 16:00:00	0.8
+2	foo	0.8	1	1969-12-31 16:00:00	0.8
+2	foo	0.8	1	1969-12-31 16:00:00	0.8
+5	eat	0.8	6	1969-12-31 16:00:20	0.8
+5	eat	0.8	6	1969-12-31 16:00:20	0.8
+5	eat	0.8	6	1969-12-31 16:00:20	0.8
+5	eat	0.8	6	1969-12-31 16:00:20	0.8
+PREHOOK: query: explain alter table orc_merge5a partition(st=80.0) concatenate
+PREHOOK: type: ALTER_PARTITION_MERGE
+POSTHOOK: query: explain alter table orc_merge5a partition(st=80.0) concatenate
+POSTHOOK: type: ALTER_PARTITION_MERGE
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+  Stage-1 depends on stages: Stage-0
+  Stage-2 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-0
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          partition:
+            st 80.0
+          replace: true
+          table:
+              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
+              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
+              name: default.orc_merge5a
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+
+PREHOOK: query: alter table orc_merge5a partition(st=80.0) concatenate
+PREHOOK: type: ALTER_PARTITION_MERGE
+PREHOOK: Input: default@orc_merge5a
+PREHOOK: Output: default@orc_merge5a@st=80.0
+POSTHOOK: query: alter table orc_merge5a partition(st=80.0) concatenate
+POSTHOOK: type: ALTER_PARTITION_MERGE
+POSTHOOK: Input: default@orc_merge5a
+POSTHOOK: Output: default@orc_merge5a@st=80.0
+PREHOOK: query: alter table orc_merge5a partition(st=0.8) concatenate
+PREHOOK: type: ALTER_PARTITION_MERGE
+PREHOOK: Input: default@orc_merge5a
+PREHOOK: Output: default@orc_merge5a@st=0.8
+POSTHOOK: query: alter table orc_merge5a partition(st=0.8) concatenate
+POSTHOOK: type: ALTER_PARTITION_MERGE
+POSTHOOK: Input: default@orc_merge5a
+POSTHOOK: Output: default@orc_merge5a@st=0.8
+PREHOOK: query: analyze table orc_merge5a partition(st=80.0) compute statistics noscan
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_merge5a
+PREHOOK: Output: default@orc_merge5a
+PREHOOK: Output: default@orc_merge5a@st=80.0
+POSTHOOK: query: analyze table orc_merge5a partition(st=80.0) compute statistics noscan
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_merge5a
+POSTHOOK: Output: default@orc_merge5a
+POSTHOOK: Output: default@orc_merge5a@st=80.0
+PREHOOK: query: analyze table orc_merge5a partition(st=0.8) compute statistics noscan
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_merge5a
+PREHOOK: Output: default@orc_merge5a
+PREHOOK: Output: default@orc_merge5a@st=0.8
+POSTHOOK: query: analyze table orc_merge5a partition(st=0.8) compute statistics noscan
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_merge5a
+POSTHOOK: Output: default@orc_merge5a
+POSTHOOK: Output: default@orc_merge5a@st=0.8
+Found 3 items
+#### A masked pattern was here ####
+Found 3 items
+#### A masked pattern was here ####
+PREHOOK: query: show partitions orc_merge5a
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@orc_merge5a
+POSTHOOK: query: show partitions orc_merge5a
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@orc_merge5a
+st=0.8
+st=1.8
+st=8.0
+st=80.0
+PREHOOK: query: select * from orc_merge5a where userid<=13
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_merge5a
+PREHOOK: Input: default@orc_merge5a@st=0.8
+PREHOOK: Input: default@orc_merge5a@st=1.8
+PREHOOK: Input: default@orc_merge5a@st=8.0
+PREHOOK: Input: default@orc_merge5a@st=80.0
+#### A masked pattern was here ####
+POSTHOOK: query: select * from orc_merge5a where userid<=13
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_merge5a
+POSTHOOK: Input: default@orc_merge5a@st=0.8
+POSTHOOK: Input: default@orc_merge5a@st=1.8
+POSTHOOK: Input: default@orc_merge5a@st=8.0
+POSTHOOK: Input: default@orc_merge5a@st=80.0
+#### A masked pattern was here ####
+13	bar	80.0	2	1969-12-31 16:00:05	80.0
+13	bar	80.0	2	1969-12-31 16:00:05	80.0
+13	bar	80.0	2	1969-12-31 16:00:05	80.0
+13	bar	80.0	2	1969-12-31 16:00:05	80.0
+2	foo	0.8	1	1969-12-31 16:00:00	0.8
+2	foo	0.8	1	1969-12-31 16:00:00	0.8
+2	foo	0.8	1	1969-12-31 16:00:00	0.8
+2	foo	0.8	1	1969-12-31 16:00:00	0.8
+5	eat	0.8	6	1969-12-31 16:00:20	0.8
+5	eat	0.8	6	1969-12-31 16:00:20	0.8
+5	eat	0.8	6	1969-12-31 16:00:20	0.8
+5	eat	0.8	6	1969-12-31 16:00:20	0.8

http://git-wip-us.apache.org/repos/asf/hive/blob/0a88760f/ql/src/test/results/clientpositive/spark/vector_inner_join.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vector_inner_join.q.out b/ql/src/test/results/clientpositive/spark/vector_inner_join.q.out
new file mode 100644
index 0000000..e63e1f1
--- /dev/null
+++ b/ql/src/test/results/clientpositive/spark/vector_inner_join.q.out
@@ -0,0 +1,853 @@
+PREHOOK: query: CREATE TABLE orc_table_1a(a INT) STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orc_table_1a
+POSTHOOK: query: CREATE TABLE orc_table_1a(a INT) STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orc_table_1a
+PREHOOK: query: CREATE TABLE orc_table_2a(c INT) STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orc_table_2a
+POSTHOOK: query: CREATE TABLE orc_table_2a(c INT) STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orc_table_2a
+PREHOOK: query: insert into table orc_table_1a values(1),(1), (2),(3)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@orc_table_1a
+POSTHOOK: query: insert into table orc_table_1a values(1),(1), (2),(3)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@orc_table_1a
+POSTHOOK: Lineage: orc_table_1a.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+PREHOOK: query: insert into table orc_table_2a values(0),(2), (3),(null),(4)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__2
+PREHOOK: Output: default@orc_table_2a
+POSTHOOK: query: insert into table orc_table_2a values(0),(2), (3),(null),(4)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__2
+POSTHOOK: Output: default@orc_table_2a
+POSTHOOK: Lineage: orc_table_2a.c EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+PREHOOK: query: explain
+select t1.a from orc_table_2a t2 join orc_table_1a t1 on t1.a = t2.c where t1.a > 2
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select t1.a from orc_table_2a t2 join orc_table_1a t1 on t1.a = t2.c where t1.a > 2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-1 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: t2
+                  Statistics: Num rows: 5 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (c > 2) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
+                    Spark HashTable Sink Operator
+                      keys:
+                        0 c (type: int)
+                        1 a (type: int)
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: t1
+                  Statistics: Num rows: 4 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (a > 2) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Inner Join 0 to 1
+                      keys:
+                        0 c (type: int)
+                        1 a (type: int)
+                      outputColumnNames: _col4
+                      input vertices:
+                        0 Map 1
+                      Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
+                      Select Operator
+                        expressions: _col4 (type: int)
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select t1.a from orc_table_2a t2 join orc_table_1a t1 on t1.a = t2.c where t1.a > 2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_table_1a
+PREHOOK: Input: default@orc_table_2a
+#### A masked pattern was here ####
+POSTHOOK: query: select t1.a from orc_table_2a t2 join orc_table_1a t1 on t1.a = t2.c where t1.a > 2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_table_1a
+POSTHOOK: Input: default@orc_table_2a
+#### A masked pattern was here ####
+3
+PREHOOK: query: explain
+select t2.c from orc_table_2a t2 left semi join orc_table_1a t1 on t1.a = t2.c where t2.c > 2
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select t2.c from orc_table_2a t2 left semi join orc_table_1a t1 on t1.a = t2.c where t2.c > 2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-1 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: t1
+                  Statistics: Num rows: 4 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (a > 2) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: a (type: int)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+                      Group By Operator
+                        keys: _col0 (type: int)
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+                        Spark HashTable Sink Operator
+                          keys:
+                            0 _col0 (type: int)
+                            1 _col0 (type: int)
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: t2
+                  Statistics: Num rows: 5 Data size: 16 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (c > 2) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: c (type: int)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
+                      Map Join Operator
+                        condition map:
+                             Left Semi Join 0 to 1
+                        keys:
+                          0 _col0 (type: int)
+                          1 _col0 (type: int)
+                        outputColumnNames: _col0
+                        input vertices:
+                          1 Map 2
+                        Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select t2.c from orc_table_2a t2 left semi join orc_table_1a t1 on t1.a = t2.c where t2.c > 2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_table_1a
+PREHOOK: Input: default@orc_table_2a
+#### A masked pattern was here ####
+POSTHOOK: query: select t2.c from orc_table_2a t2 left semi join orc_table_1a t1 on t1.a = t2.c where t2.c > 2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_table_1a
+POSTHOOK: Input: default@orc_table_2a
+#### A masked pattern was here ####
+3
+PREHOOK: query: CREATE TABLE orc_table_1b(v1 STRING, a INT) STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orc_table_1b
+POSTHOOK: query: CREATE TABLE orc_table_1b(v1 STRING, a INT) STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orc_table_1b
+PREHOOK: query: CREATE TABLE orc_table_2b(c INT, v2 STRING) STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orc_table_2b
+POSTHOOK: query: CREATE TABLE orc_table_2b(c INT, v2 STRING) STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orc_table_2b
+PREHOOK: query: insert into table orc_table_1b values("one", 1),("one", 1), ("two", 2),("three", 3)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__3
+PREHOOK: Output: default@orc_table_1b
+POSTHOOK: query: insert into table orc_table_1b values("one", 1),("one", 1), ("two", 2),("three", 3)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__3
+POSTHOOK: Output: default@orc_table_1b
+POSTHOOK: Lineage: orc_table_1b.a EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: orc_table_1b.v1 SIMPLE [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+PREHOOK: query: insert into table orc_table_2b values(0, "ZERO"),(2, "TWO"), (3, "THREE"),(null, "<NULL>"),(4, "FOUR")
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__4
+PREHOOK: Output: default@orc_table_2b
+POSTHOOK: query: insert into table orc_table_2b values(0, "ZERO"),(2, "TWO"), (3, "THREE"),(null, "<NULL>"),(4, "FOUR")
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__4
+POSTHOOK: Output: default@orc_table_2b
+POSTHOOK: Lineage: orc_table_2b.c EXPRESSION [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: orc_table_2b.v2 SIMPLE [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: explain
+select t1.v1, t1.a from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select t1.v1, t1.a from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-1 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: t1
+                  Statistics: Num rows: 4 Data size: 364 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (a > 2) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 91 Basic stats: COMPLETE Column stats: NONE
+                    Spark HashTable Sink Operator
+                      keys:
+                        0 c (type: int)
+                        1 a (type: int)
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: t2
+                  Statistics: Num rows: 5 Data size: 456 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (c > 2) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 91 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Inner Join 0 to 1
+                      keys:
+                        0 c (type: int)
+                        1 a (type: int)
+                      outputColumnNames: _col5, _col6
+                      input vertices:
+                        1 Map 2
+                      Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                      Select Operator
+                        expressions: _col5 (type: string), _col6 (type: int)
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select t1.v1, t1.a from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_table_1b
+PREHOOK: Input: default@orc_table_2b
+#### A masked pattern was here ####
+POSTHOOK: query: select t1.v1, t1.a from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_table_1b
+POSTHOOK: Input: default@orc_table_2b
+#### A masked pattern was here ####
+three	3
+PREHOOK: query: explain
+select t1.v1, t1.a, t2.c, t2.v2 from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select t1.v1, t1.a, t2.c, t2.v2 from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-1 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: t1
+                  Statistics: Num rows: 4 Data size: 364 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (a > 2) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 91 Basic stats: COMPLETE Column stats: NONE
+                    Spark HashTable Sink Operator
+                      keys:
+                        0 c (type: int)
+                        1 a (type: int)
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: t2
+                  Statistics: Num rows: 5 Data size: 456 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (c > 2) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 91 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Inner Join 0 to 1
+                      keys:
+                        0 c (type: int)
+                        1 a (type: int)
+                      outputColumnNames: _col0, _col1, _col5, _col6
+                      input vertices:
+                        1 Map 2
+                      Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                      Select Operator
+                        expressions: _col5 (type: string), _col6 (type: int), _col0 (type: int), _col1 (type: string)
+                        outputColumnNames: _col0, _col1, _col2, _col3
+                        Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select t1.v1, t1.a, t2.c, t2.v2 from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_table_1b
+PREHOOK: Input: default@orc_table_2b
+#### A masked pattern was here ####
+POSTHOOK: query: select t1.v1, t1.a, t2.c, t2.v2 from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_table_1b
+POSTHOOK: Input: default@orc_table_2b
+#### A masked pattern was here ####
+three	3	3	THREE
+PREHOOK: query: explain
+select t1.v1, t1.a*2, t2.c*5, t2.v2 from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select t1.v1, t1.a*2, t2.c*5, t2.v2 from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-1 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: t1
+                  Statistics: Num rows: 4 Data size: 364 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (a > 2) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 91 Basic stats: COMPLETE Column stats: NONE
+                    Spark HashTable Sink Operator
+                      keys:
+                        0 c (type: int)
+                        1 a (type: int)
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: t2
+                  Statistics: Num rows: 5 Data size: 456 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (c > 2) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 91 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Inner Join 0 to 1
+                      keys:
+                        0 c (type: int)
+                        1 a (type: int)
+                      outputColumnNames: _col0, _col1, _col5, _col6
+                      input vertices:
+                        1 Map 2
+                      Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                      Select Operator
+                        expressions: _col5 (type: string), (_col6 * 2) (type: int), (_col0 * 5) (type: int), _col1 (type: string)
+                        outputColumnNames: _col0, _col1, _col2, _col3
+                        Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select t1.v1, t1.a*2, t2.c*5, t2.v2 from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_table_1b
+PREHOOK: Input: default@orc_table_2b
+#### A masked pattern was here ####
+POSTHOOK: query: select t1.v1, t1.a*2, t2.c*5, t2.v2 from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_table_1b
+POSTHOOK: Input: default@orc_table_2b
+#### A masked pattern was here ####
+three	6	15	THREE
+PREHOOK: query: explain
+select t1.v1, t2.v2, t2.c from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select t1.v1, t2.v2, t2.c from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-1 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: t1
+                  Statistics: Num rows: 4 Data size: 364 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (a > 2) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 91 Basic stats: COMPLETE Column stats: NONE
+                    Spark HashTable Sink Operator
+                      keys:
+                        0 c (type: int)
+                        1 a (type: int)
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: t2
+                  Statistics: Num rows: 5 Data size: 456 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (c > 2) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 91 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Inner Join 0 to 1
+                      keys:
+                        0 c (type: int)
+                        1 a (type: int)
+                      outputColumnNames: _col0, _col1, _col5
+                      input vertices:
+                        1 Map 2
+                      Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                      Select Operator
+                        expressions: _col5 (type: string), _col1 (type: string), _col0 (type: int)
+                        outputColumnNames: _col0, _col1, _col2
+                        Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select t1.v1, t2.v2, t2.c from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_table_1b
+PREHOOK: Input: default@orc_table_2b
+#### A masked pattern was here ####
+POSTHOOK: query: select t1.v1, t2.v2, t2.c from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_table_1b
+POSTHOOK: Input: default@orc_table_2b
+#### A masked pattern was here ####
+three	THREE	3
+PREHOOK: query: explain
+select t1.a, t1.v1, t2.v2 from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select t1.a, t1.v1, t2.v2 from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-1 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: t1
+                  Statistics: Num rows: 4 Data size: 364 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (a > 2) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 91 Basic stats: COMPLETE Column stats: NONE
+                    Spark HashTable Sink Operator
+                      keys:
+                        0 c (type: int)
+                        1 a (type: int)
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: t2
+                  Statistics: Num rows: 5 Data size: 456 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (c > 2) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 91 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Inner Join 0 to 1
+                      keys:
+                        0 c (type: int)
+                        1 a (type: int)
+                      outputColumnNames: _col1, _col5, _col6
+                      input vertices:
+                        1 Map 2
+                      Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                      Select Operator
+                        expressions: _col6 (type: int), _col5 (type: string), _col1 (type: string)
+                        outputColumnNames: _col0, _col1, _col2
+                        Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select t1.a, t1.v1, t2.v2 from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_table_1b
+PREHOOK: Input: default@orc_table_2b
+#### A masked pattern was here ####
+POSTHOOK: query: select t1.a, t1.v1, t2.v2 from orc_table_2b t2 join orc_table_1b t1 on t1.a = t2.c where t1.a > 2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_table_1b
+POSTHOOK: Input: default@orc_table_2b
+#### A masked pattern was here ####
+3	three	THREE
+PREHOOK: query: explain
+select t1.v1, t2.v2, t2.c from orc_table_1b t1 join orc_table_2b t2 on t1.a = t2.c where t1.a > 2
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select t1.v1, t2.v2, t2.c from orc_table_1b t1 join orc_table_2b t2 on t1.a = t2.c where t1.a > 2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-1 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: t2
+                  Statistics: Num rows: 5 Data size: 456 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (c > 2) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 91 Basic stats: COMPLETE Column stats: NONE
+                    Spark HashTable Sink Operator
+                      keys:
+                        0 a (type: int)
+                        1 c (type: int)
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: t1
+                  Statistics: Num rows: 4 Data size: 364 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (a > 2) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 91 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Inner Join 0 to 1
+                      keys:
+                        0 a (type: int)
+                        1 c (type: int)
+                      outputColumnNames: _col0, _col5, _col6
+                      input vertices:
+                        1 Map 2
+                      Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                      Select Operator
+                        expressions: _col0 (type: string), _col6 (type: string), _col5 (type: int)
+                        outputColumnNames: _col0, _col1, _col2
+                        Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select t1.v1, t2.v2, t2.c from orc_table_1b t1 join orc_table_2b t2 on t1.a = t2.c where t1.a > 2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_table_1b
+PREHOOK: Input: default@orc_table_2b
+#### A masked pattern was here ####
+POSTHOOK: query: select t1.v1, t2.v2, t2.c from orc_table_1b t1 join orc_table_2b t2 on t1.a = t2.c where t1.a > 2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_table_1b
+POSTHOOK: Input: default@orc_table_2b
+#### A masked pattern was here ####
+three	THREE	3
+PREHOOK: query: explain
+select t1.a, t1.v1, t2.v2 from orc_table_1b t1 join orc_table_2b t2 on t1.a = t2.c where t1.a > 2
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select t1.a, t1.v1, t2.v2 from orc_table_1b t1 join orc_table_2b t2 on t1.a = t2.c where t1.a > 2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-1 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: t2
+                  Statistics: Num rows: 5 Data size: 456 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (c > 2) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 91 Basic stats: COMPLETE Column stats: NONE
+                    Spark HashTable Sink Operator
+                      keys:
+                        0 a (type: int)
+                        1 c (type: int)
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: t1
+                  Statistics: Num rows: 4 Data size: 364 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (a > 2) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 91 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Inner Join 0 to 1
+                      keys:
+                        0 a (type: int)
+                        1 c (type: int)
+                      outputColumnNames: _col0, _col1, _col6
+                      input vertices:
+                        1 Map 2
+                      Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                      Select Operator
+                        expressions: _col1 (type: int), _col0 (type: string), _col6 (type: string)
+                        outputColumnNames: _col0, _col1, _col2
+                        Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+                          table:
+                              input format: org.apache.hadoop.mapred.TextInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select t1.a, t1.v1, t2.v2 from orc_table_1b t1 join orc_table_2b t2 on t1.a = t2.c where t1.a > 2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_table_1b
+PREHOOK: Input: default@orc_table_2b
+#### A masked pattern was here ####
+POSTHOOK: query: select t1.a, t1.v1, t2.v2 from orc_table_1b t1 join orc_table_2b t2 on t1.a = t2.c where t1.a > 2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_table_1b
+POSTHOOK: Input: default@orc_table_2b
+#### A masked pattern was here ####
+3	three	THREE

http://git-wip-us.apache.org/repos/asf/hive/blob/0a88760f/ql/src/test/results/clientpositive/spark/vector_outer_join0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vector_outer_join0.q.out b/ql/src/test/results/clientpositive/spark/vector_outer_join0.q.out
new file mode 100644
index 0000000..22c1b6a
--- /dev/null
+++ b/ql/src/test/results/clientpositive/spark/vector_outer_join0.q.out
@@ -0,0 +1,242 @@
+PREHOOK: query: CREATE TABLE orc_table_1(v1 STRING, a INT) STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orc_table_1
+POSTHOOK: query: CREATE TABLE orc_table_1(v1 STRING, a INT) STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orc_table_1
+PREHOOK: query: CREATE TABLE orc_table_2(c INT, v2 STRING) STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orc_table_2
+POSTHOOK: query: CREATE TABLE orc_table_2(c INT, v2 STRING) STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orc_table_2
+PREHOOK: query: insert into table orc_table_1 values ("<null1>", null),("one", 1),("one", 1),("two", 2),("three", 3),("<null2>", null)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@orc_table_1
+POSTHOOK: query: insert into table orc_table_1 values ("<null1>", null),("one", 1),("one", 1),("two", 2),("three", 3),("<null2>", null)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@orc_table_1
+POSTHOOK: Lineage: orc_table_1.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: orc_table_1.v1 SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+PREHOOK: query: insert into table orc_table_2 values (0, "ZERO"),(2, "TWO"), (3, "THREE"),(null, "<NULL1>"),(4, "FOUR"),(null, "<NULL2>")
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__2
+PREHOOK: Output: default@orc_table_2
+POSTHOOK: query: insert into table orc_table_2 values (0, "ZERO"),(2, "TWO"), (3, "THREE"),(null, "<NULL1>"),(4, "FOUR"),(null, "<NULL2>")
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__2
+POSTHOOK: Output: default@orc_table_2
+POSTHOOK: Lineage: orc_table_2.c EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: orc_table_2.v2 SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: select * from orc_table_1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_table_1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from orc_table_1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_table_1
+#### A masked pattern was here ####
+<null1>	NULL
+<null2>	NULL
+one	1
+one	1
+three	3
+two	2
+PREHOOK: query: select * from orc_table_2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_table_2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from orc_table_2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_table_2
+#### A masked pattern was here ####
+0	ZERO
+2	TWO
+3	THREE
+4	FOUR
+NULL	<NULL1>
+NULL	<NULL2>
+PREHOOK: query: explain
+select t1.v1, t1.a, t2.c, t2.v2 from orc_table_1 t1 left outer join orc_table_2 t2 on t1.a = t2.c
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select t1.v1, t1.a, t2.c, t2.v2 from orc_table_1 t1 left outer join orc_table_2 t2 on t1.a = t2.c
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-1 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: t2
+                  Statistics: Num rows: 6 Data size: 550 Basic stats: COMPLETE Column stats: NONE
+                  Spark HashTable Sink Operator
+                    keys:
+                      0 a (type: int)
+                      1 c (type: int)
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: t1
+                  Statistics: Num rows: 6 Data size: 544 Basic stats: COMPLETE Column stats: NONE
+                  Map Join Operator
+                    condition map:
+                         Left Outer Join0 to 1
+                    keys:
+                      0 a (type: int)
+                      1 c (type: int)
+                    outputColumnNames: _col0, _col1, _col5, _col6
+                    input vertices:
+                      1 Map 2
+                    Statistics: Num rows: 6 Data size: 598 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: _col0 (type: string), _col1 (type: int), _col5 (type: int), _col6 (type: string)
+                      outputColumnNames: _col0, _col1, _col2, _col3
+                      Statistics: Num rows: 6 Data size: 598 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 6 Data size: 598 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+select t1.v1, t1.a, t2.c, t2.v2 from orc_table_1 t1 left outer join orc_table_2 t2 on t1.a = t2.c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_table_1
+PREHOOK: Input: default@orc_table_2
+#### A masked pattern was here ####
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+select t1.v1, t1.a, t2.c, t2.v2 from orc_table_1 t1 left outer join orc_table_2 t2 on t1.a = t2.c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_table_1
+POSTHOOK: Input: default@orc_table_2
+#### A masked pattern was here ####
+<null1>	NULL	NULL	NULL
+<null2>	NULL	NULL	NULL
+one	1	NULL	NULL
+one	1	NULL	NULL
+three	3	3	THREE
+two	2	2	TWO
+PREHOOK: query: explain
+select t1.v1, t1.a, t2.c, t2.v2 from orc_table_1 t1 right outer join orc_table_2 t2 on t1.a = t2.c
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select t1.v1, t1.a, t2.c, t2.v2 from orc_table_1 t1 right outer join orc_table_2 t2 on t1.a = t2.c
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-1 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: t1
+                  Statistics: Num rows: 6 Data size: 544 Basic stats: COMPLETE Column stats: NONE
+                  Spark HashTable Sink Operator
+                    keys:
+                      0 a (type: int)
+                      1 c (type: int)
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: t2
+                  Statistics: Num rows: 6 Data size: 550 Basic stats: COMPLETE Column stats: NONE
+                  Map Join Operator
+                    condition map:
+                         Right Outer Join0 to 1
+                    keys:
+                      0 a (type: int)
+                      1 c (type: int)
+                    outputColumnNames: _col0, _col1, _col5, _col6
+                    input vertices:
+                      0 Map 1
+                    Statistics: Num rows: 6 Data size: 598 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: _col0 (type: string), _col1 (type: int), _col5 (type: int), _col6 (type: string)
+                      outputColumnNames: _col0, _col1, _col2, _col3
+                      Statistics: Num rows: 6 Data size: 598 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 6 Data size: 598 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+            Local Work:
+              Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+select t1.v1, t1.a, t2.c, t2.v2 from orc_table_1 t1 right outer join orc_table_2 t2 on t1.a = t2.c
+PREHOOK: type: QUERY
+PREHOOK: Input: default@orc_table_1
+PREHOOK: Input: default@orc_table_2
+#### A masked pattern was here ####
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+select t1.v1, t1.a, t2.c, t2.v2 from orc_table_1 t1 right outer join orc_table_2 t2 on t1.a = t2.c
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@orc_table_1
+POSTHOOK: Input: default@orc_table_2
+#### A masked pattern was here ####
+NULL	NULL	0	ZERO
+NULL	NULL	4	FOUR
+NULL	NULL	NULL	<NULL1>
+NULL	NULL	NULL	<NULL2>
+three	3	3	THREE
+two	2	2	TWO