Posted to commits@hive.apache.org by kg...@apache.org on 2017/04/18 07:02:55 UTC

[08/11] hive git commit: HIVE-16146: If possible find a better way to filter the TestBeeLineDriver output (Peter Vary via Zoltan Haindrich, reviewed by Vihang Karajgaonkar)
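
For context: this hunk rewrites the TestBeeLineDriver golden file so that the verbose per-command capture on the '-' side (the ">>>" echoes, the INFO/DEBUG log lines, and the "No rows affected" markers) is filtered down to the same PREHOOK/POSTHOOK format the CliDriver .q.out files use, with nondeterministic local paths masked. A minimal sketch of that kind of line filter follows; the class, method, and pattern names are illustrative assumptions, not the code from the actual patch.

    import java.util.ArrayList;
    import java.util.List;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    /**
     * Hypothetical sketch of the kind of filtering this patch introduces.
     * Names are illustrative only -- not the actual HIVE-16146 code.
     */
    public class BeeLineOutputFilter {

      // BeeLine echo/log noise that should not reach the golden file.
      private static final Pattern NOISE =
          Pattern.compile("^(>>>|DEBUG :|No rows affected).*");

      // Log-prefixed hook lines whose payload is kept, prefix stripped.
      private static final Pattern HOOK =
          Pattern.compile("^INFO  : ((?:PRE|POST)HOOK: .*)");

      public static List<String> filter(List<String> rawLines) {
        List<String> out = new ArrayList<>();
        for (String line : rawLines) {
          Matcher hook = HOOK.matcher(line);
          if (hook.matches()) {
            String payload = hook.group(1);
            // Local filesystem paths are nondeterministic, so mask them
            // the same way the CliDriver golden files do.
            out.add(payload.contains("file:/")
                ? "#### A masked pattern was here ####" : payload);
          } else if (!NOISE.matcher(line).matches()
              && !line.startsWith("INFO ")) {
            out.add(line);  // plan output, query text, etc. pass through
          }
        }
        return out;
      }
    }

Applied to the '-' side of the hunk below, a filter along these lines yields roughly the PREHOOK/POSTHOOK-plus-plan format visible on the '+' side.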

http://git-wip-us.apache.org/repos/asf/hive/blob/2509e2fa/ql/src/test/results/clientpositive/beeline/smb_mapjoin_10.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/beeline/smb_mapjoin_10.q.out b/ql/src/test/results/clientpositive/beeline/smb_mapjoin_10.q.out
index bdfaefb..50706f4 100644
--- a/ql/src/test/results/clientpositive/beeline/smb_mapjoin_10.q.out
+++ b/ql/src/test/results/clientpositive/beeline/smb_mapjoin_10.q.out
@@ -1,248 +1,107 @@
->>>  set hive.strict.checks.bucketing=false;
-No rows affected 
->>>  
->>>  
->>>  create table tmp_smb_bucket_10(userid int, pageid int, postid int, type string) partitioned by (ds string) CLUSTERED BY (userid) SORTED BY (pageid, postid, type, userid) INTO 2 BUCKETS STORED AS RCFILE; 
-INFO  : Compiling commandqueryId=(!!{queryId}!!): create table tmp_smb_bucket_10(userid int, pageid int, postid int, type string) partitioned by (ds string) CLUSTERED BY (userid) SORTED BY (pageid, postid, type, userid) INTO 2 BUCKETS STORED AS RCFILE
-INFO  : Semantic Analysis Completed
-INFO  : Returning Hive schema: Schema(fieldSchemas:null, properties:null)
-INFO  : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
-INFO  : Executing commandqueryId=(!!{queryId}!!): create table tmp_smb_bucket_10(userid int, pageid int, postid int, type string) partitioned by (ds string) CLUSTERED BY (userid) SORTED BY (pageid, postid, type, userid) INTO 2 BUCKETS STORED AS RCFILE
-INFO  : PREHOOK: query: create table tmp_smb_bucket_10(userid int, pageid int, postid int, type string) partitioned by (ds string) CLUSTERED BY (userid) SORTED BY (pageid, postid, type, userid) INTO 2 BUCKETS STORED AS RCFILE
-INFO  : PREHOOK: type: CREATETABLE
-INFO  : PREHOOK: Output: database:smb_mapjoin_10
-INFO  : PREHOOK: Output: smb_mapjoin_10@tmp_smb_bucket_10
-INFO  : Starting task [Stage-0:DDL] in serial mode
-INFO  : POSTHOOK: query: create table tmp_smb_bucket_10(userid int, pageid int, postid int, type string) partitioned by (ds string) CLUSTERED BY (userid) SORTED BY (pageid, postid, type, userid) INTO 2 BUCKETS STORED AS RCFILE
-INFO  : POSTHOOK: type: CREATETABLE
-INFO  : POSTHOOK: Output: database:smb_mapjoin_10
-INFO  : POSTHOOK: Output: smb_mapjoin_10@tmp_smb_bucket_10
-INFO  : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
-INFO  : OK
-DEBUG : Shutting down query create table tmp_smb_bucket_10(userid int, pageid int, postid int, type string) partitioned by (ds string) CLUSTERED BY (userid) SORTED BY (pageid, postid, type, userid) INTO 2 BUCKETS STORED AS RCFILE
-No rows affected 
->>>  
->>>  alter table tmp_smb_bucket_10 add partition (ds = '1');
-INFO  : Compiling commandqueryId=(!!{queryId}!!): alter table tmp_smb_bucket_10 add partition (ds = '1')
-INFO  : Semantic Analysis Completed
-INFO  : Returning Hive schema: Schema(fieldSchemas:null, properties:null)
-INFO  : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
-INFO  : Executing commandqueryId=(!!{queryId}!!): alter table tmp_smb_bucket_10 add partition (ds = '1')
-INFO  : PREHOOK: query: alter table tmp_smb_bucket_10 add partition (ds = '1')
-INFO  : PREHOOK: type: ALTERTABLE_ADDPARTS
-INFO  : PREHOOK: Output: smb_mapjoin_10@tmp_smb_bucket_10
-INFO  : Starting task [Stage-0:DDL] in serial mode
-INFO  : POSTHOOK: query: alter table tmp_smb_bucket_10 add partition (ds = '1')
-INFO  : POSTHOOK: type: ALTERTABLE_ADDPARTS
-INFO  : POSTHOOK: Output: smb_mapjoin_10@tmp_smb_bucket_10
-INFO  : POSTHOOK: Output: smb_mapjoin_10@tmp_smb_bucket_10@ds=1
-INFO  : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
-INFO  : OK
-DEBUG : Shutting down query alter table tmp_smb_bucket_10 add partition (ds = '1')
-No rows affected 
->>>  alter table tmp_smb_bucket_10 add partition (ds = '2');
-INFO  : Compiling commandqueryId=(!!{queryId}!!): alter table tmp_smb_bucket_10 add partition (ds = '2')
-INFO  : Semantic Analysis Completed
-INFO  : Returning Hive schema: Schema(fieldSchemas:null, properties:null)
-INFO  : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
-INFO  : Executing commandqueryId=(!!{queryId}!!): alter table tmp_smb_bucket_10 add partition (ds = '2')
-INFO  : PREHOOK: query: alter table tmp_smb_bucket_10 add partition (ds = '2')
-INFO  : PREHOOK: type: ALTERTABLE_ADDPARTS
-INFO  : PREHOOK: Output: smb_mapjoin_10@tmp_smb_bucket_10
-INFO  : Starting task [Stage-0:DDL] in serial mode
-INFO  : POSTHOOK: query: alter table tmp_smb_bucket_10 add partition (ds = '2')
-INFO  : POSTHOOK: type: ALTERTABLE_ADDPARTS
-INFO  : POSTHOOK: Output: smb_mapjoin_10@tmp_smb_bucket_10
-INFO  : POSTHOOK: Output: smb_mapjoin_10@tmp_smb_bucket_10@ds=2
-INFO  : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
-INFO  : OK
-DEBUG : Shutting down query alter table tmp_smb_bucket_10 add partition (ds = '2')
-No rows affected 
->>>  
->>>  -- add dummy files to make sure that the number of files in each partition is same as number of buckets
->>>   
->>>  load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1');
-INFO  : Compiling commandqueryId=(!!{queryId}!!): load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
-INFO  : Semantic Analysis Completed
-INFO  : Returning Hive schema: Schema(fieldSchemas:null, properties:null)
-INFO  : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
-INFO  : Executing commandqueryId=(!!{queryId}!!): load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
-INFO  : PREHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
-INFO  : PREHOOK: type: LOAD
-INFO  : PREHOOK: Input: file:/!!ELIDED!!
-INFO  : PREHOOK: Output: smb_mapjoin_10@tmp_smb_bucket_10@ds=1
-INFO  : Starting task [Stage-0:MOVE] in serial mode
-INFO  : Loading data to table smb_mapjoin_10.tmp_smb_bucket_10 partition (ds=1) from file:/!!ELIDED!!
-INFO  : Starting task [Stage-1:STATS] in serial mode
-INFO  : POSTHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
-INFO  : POSTHOOK: type: LOAD
-INFO  : POSTHOOK: Input: file:/!!ELIDED!!
-INFO  : POSTHOOK: Output: smb_mapjoin_10@tmp_smb_bucket_10@ds=1
-INFO  : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
-INFO  : OK
-DEBUG : Shutting down query load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
-No rows affected 
->>>  load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1');
-INFO  : Compiling commandqueryId=(!!{queryId}!!): load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
-INFO  : Semantic Analysis Completed
-INFO  : Returning Hive schema: Schema(fieldSchemas:null, properties:null)
-INFO  : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
-INFO  : Executing commandqueryId=(!!{queryId}!!): load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
-INFO  : PREHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
-INFO  : PREHOOK: type: LOAD
-INFO  : PREHOOK: Input: file:/!!ELIDED!!
-INFO  : PREHOOK: Output: smb_mapjoin_10@tmp_smb_bucket_10@ds=1
-INFO  : Starting task [Stage-0:MOVE] in serial mode
-INFO  : Loading data to table smb_mapjoin_10.tmp_smb_bucket_10 partition (ds=1) from file:/!!ELIDED!!
-INFO  : Starting task [Stage-1:STATS] in serial mode
-INFO  : POSTHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
-INFO  : POSTHOOK: type: LOAD
-INFO  : POSTHOOK: Input: file:/!!ELIDED!!
-INFO  : POSTHOOK: Output: smb_mapjoin_10@tmp_smb_bucket_10@ds=1
-INFO  : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
-INFO  : OK
-DEBUG : Shutting down query load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
-No rows affected 
->>>  
->>>  load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2');
-INFO  : Compiling commandqueryId=(!!{queryId}!!): load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
-INFO  : Semantic Analysis Completed
-INFO  : Returning Hive schema: Schema(fieldSchemas:null, properties:null)
-INFO  : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
-INFO  : Executing commandqueryId=(!!{queryId}!!): load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
-INFO  : PREHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
-INFO  : PREHOOK: type: LOAD
-INFO  : PREHOOK: Input: file:/!!ELIDED!!
-INFO  : PREHOOK: Output: smb_mapjoin_10@tmp_smb_bucket_10@ds=2
-INFO  : Starting task [Stage-0:MOVE] in serial mode
-INFO  : Loading data to table smb_mapjoin_10.tmp_smb_bucket_10 partition (ds=2) from file:/!!ELIDED!!
-INFO  : Starting task [Stage-1:STATS] in serial mode
-INFO  : POSTHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
-INFO  : POSTHOOK: type: LOAD
-INFO  : POSTHOOK: Input: file:/!!ELIDED!!
-INFO  : POSTHOOK: Output: smb_mapjoin_10@tmp_smb_bucket_10@ds=2
-INFO  : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
-INFO  : OK
-DEBUG : Shutting down query load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
-No rows affected 
->>>  load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2');
-INFO  : Compiling commandqueryId=(!!{queryId}!!): load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
-INFO  : Semantic Analysis Completed
-INFO  : Returning Hive schema: Schema(fieldSchemas:null, properties:null)
-INFO  : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
-INFO  : Executing commandqueryId=(!!{queryId}!!): load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
-INFO  : PREHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
-INFO  : PREHOOK: type: LOAD
-INFO  : PREHOOK: Input: file:/!!ELIDED!!
-INFO  : PREHOOK: Output: smb_mapjoin_10@tmp_smb_bucket_10@ds=2
-INFO  : Starting task [Stage-0:MOVE] in serial mode
-INFO  : Loading data to table smb_mapjoin_10.tmp_smb_bucket_10 partition (ds=2) from file:/!!ELIDED!!
-INFO  : Starting task [Stage-1:STATS] in serial mode
-INFO  : POSTHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
-INFO  : POSTHOOK: type: LOAD
-INFO  : POSTHOOK: Input: file:/!!ELIDED!!
-INFO  : POSTHOOK: Output: smb_mapjoin_10@tmp_smb_bucket_10@ds=2
-INFO  : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
-INFO  : OK
-DEBUG : Shutting down query load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
-No rows affected 
->>>  set hive.cbo.enable=false;
-No rows affected 
->>>  set hive.optimize.bucketmapjoin = true;
-No rows affected 
->>>  set hive.optimize.bucketmapjoin.sortedmerge = true;
-No rows affected 
->>>  set hive.input.format = org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
-No rows affected 
->>>  
->>>  explain
-select /*+mapjoin(a)*/ * from tmp_smb_bucket_10 a join tmp_smb_bucket_10 b 
-on (a.ds = '1' and b.ds = '2' and
-    a.userid = b.userid and
-    a.pageid = b.pageid and
-    a.postid = b.postid and
-    a.type = b.type);
-INFO  : Compiling commandqueryId=(!!{queryId}!!): explain
-select /*+mapjoin(a)*/ * from tmp_smb_bucket_10 a join tmp_smb_bucket_10 b 
-on (a.ds = '1' and b.ds = '2' and
-    a.userid = b.userid and
-    a.pageid = b.pageid and
-    a.postid = b.postid and
-    a.type = b.type)
-INFO  : Semantic Analysis Completed
-INFO  : Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:Explain, type:string, comment:null)], properties:null)
-INFO  : Completed compiling commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
-INFO  : Executing commandqueryId=(!!{queryId}!!): explain
-select /*+mapjoin(a)*/ * from tmp_smb_bucket_10 a join tmp_smb_bucket_10 b 
-on (a.ds = '1' and b.ds = '2' and
-    a.userid = b.userid and
-    a.pageid = b.pageid and
-    a.postid = b.postid and
-    a.type = b.type)
-INFO  : PREHOOK: query: explain
-select /*+mapjoin(a)*/ * from tmp_smb_bucket_10 a join tmp_smb_bucket_10 b 
-on (a.ds = '1' and b.ds = '2' and
-    a.userid = b.userid and
-    a.pageid = b.pageid and
-    a.postid = b.postid and
-    a.type = b.type)
-INFO  : PREHOOK: type: QUERY
-INFO  : Starting task [Stage-3:EXPLAIN] in serial mode
-INFO  : POSTHOOK: query: explain
+PREHOOK: query: create table tmp_smb_bucket_10(userid int, pageid int, postid int, type string) partitioned by (ds string) CLUSTERED BY (userid) SORTED BY (pageid, postid, type, userid) INTO 2 BUCKETS STORED AS RCFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tmp_smb_bucket_10
+POSTHOOK: query: create table tmp_smb_bucket_10(userid int, pageid int, postid int, type string) partitioned by (ds string) CLUSTERED BY (userid) SORTED BY (pageid, postid, type, userid) INTO 2 BUCKETS STORED AS RCFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tmp_smb_bucket_10
+PREHOOK: query: alter table tmp_smb_bucket_10 add partition (ds = '1')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@tmp_smb_bucket_10
+POSTHOOK: query: alter table tmp_smb_bucket_10 add partition (ds = '1')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@tmp_smb_bucket_10
+POSTHOOK: Output: default@tmp_smb_bucket_10@ds=1
+PREHOOK: query: alter table tmp_smb_bucket_10 add partition (ds = '2')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@tmp_smb_bucket_10
+POSTHOOK: query: alter table tmp_smb_bucket_10 add partition (ds = '2')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@tmp_smb_bucket_10
+POSTHOOK: Output: default@tmp_smb_bucket_10@ds=2
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@tmp_smb_bucket_10@ds=1
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@tmp_smb_bucket_10@ds=1
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@tmp_smb_bucket_10@ds=1
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='1')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@tmp_smb_bucket_10@ds=1
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@tmp_smb_bucket_10@ds=2
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_1.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@tmp_smb_bucket_10@ds=2
+PREHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@tmp_smb_bucket_10@ds=2
+POSTHOOK: query: load data local inpath '../../data/files/smbbucket_2.rc' INTO TABLE tmp_smb_bucket_10 partition(ds='2')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@tmp_smb_bucket_10@ds=2
+PREHOOK: query: explain
 select /*+mapjoin(a)*/ * from tmp_smb_bucket_10 a join tmp_smb_bucket_10 b 
 on (a.ds = '1' and b.ds = '2' and
     a.userid = b.userid and
     a.pageid = b.pageid and
     a.postid = b.postid and
     a.type = b.type)
-INFO  : POSTHOOK: type: QUERY
-INFO  : Completed executing commandqueryId=(!!{queryId}!!); Time taken: !!ELIDED!! seconds
-INFO  : OK
-DEBUG : Shutting down query explain
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
 select /*+mapjoin(a)*/ * from tmp_smb_bucket_10 a join tmp_smb_bucket_10 b 
 on (a.ds = '1' and b.ds = '2' and
     a.userid = b.userid and
     a.pageid = b.pageid and
     a.postid = b.postid and
     a.type = b.type)
-'Explain'
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 depends on stages: Stage-1'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Map Operator Tree:'
-'          TableScan'
-'            alias: b'
-'            Statistics: Num rows: 3 Data size: 414 Basic stats: COMPLETE Column stats: NONE'
-'            Filter Operator'
-'              predicate: (userid is not null and pageid is not null and postid is not null and type is not null) (type: boolean)'
-'              Statistics: Num rows: 3 Data size: 414 Basic stats: COMPLETE Column stats: NONE'
-'              Sorted Merge Bucket Map Join Operator'
-'                condition map:'
-'                     Inner Join 0 to 1'
-'                keys:'
-'                  0 userid (type: int), pageid (type: int), postid (type: int), type (type: string)'
-'                  1 userid (type: int), pageid (type: int), postid (type: int), type (type: string)'
-'                outputColumnNames: _col0, _col1, _col2, _col3, _col8, _col9, _col10, _col11'
-'                Select Operator'
-'                  expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int), _col3 (type: string), '1' (type: string), _col8 (type: int), _col9 (type: int), _col10 (type: int), _col11 (type: string), '2' (type: string)'
-'                  outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9'
-'                  File Output Operator'
-'                    compressed: false'
-'                    table:'
-'                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat'
-'                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'
-'                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-'      Processor Tree:'
-'        ListSink'
-''
-37 rows selected 
->>>  
->>>  !record
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: b
+            Statistics: Num rows: 3 Data size: 414 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (userid is not null and pageid is not null and postid is not null and type is not null) (type: boolean)
+              Statistics: Num rows: 3 Data size: 414 Basic stats: COMPLETE Column stats: NONE
+              Sorted Merge Bucket Map Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                keys:
+                  0 userid (type: int), pageid (type: int), postid (type: int), type (type: string)
+                  1 userid (type: int), pageid (type: int), postid (type: int), type (type: string)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col8, _col9, _col10, _col11
+                Select Operator
+                  expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int), _col3 (type: string), '1' (type: string), _col8 (type: int), _col9 (type: int), _col10 (type: int), _col11 (type: string), '2' (type: string)
+                  outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
+                  File Output Operator
+                    compressed: false
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+