Posted to commits@hive.apache.org by rh...@apache.org on 2014/03/08 18:23:34 UTC

svn commit: r1575574 [2/2] - in /hive/branches/branch-0.13/ql/src: java/org/apache/hadoop/hive/ql/ java/org/apache/hadoop/hive/ql/exec/ java/org/apache/hadoop/hive/ql/optimizer/ java/org/apache/hadoop/hive/ql/optimizer/correlation/ java/org/apache/hado...

Modified: hive/branches/branch-0.13/ql/src/test/results/clientpositive/index_auto_update.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/test/results/clientpositive/index_auto_update.q.out?rev=1575574&r1=1575573&r2=1575574&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/test/results/clientpositive/index_auto_update.q.out (original)
+++ hive/branches/branch-0.13/ql/src/test/results/clientpositive/index_auto_update.q.out Sat Mar  8 17:23:33 2014
@@ -55,22 +55,22 @@ POSTHOOK: Lineage: default__temp_temp_in
 POSTHOOK: Lineage: temp.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: temp.val SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 STAGE DEPENDENCIES:
-  Stage-4 is a root stage
-  Stage-10 depends on stages: Stage-4 , consists of Stage-7, Stage-6, Stage-8
-  Stage-7
-  Stage-0 depends on stages: Stage-7, Stage-6, Stage-9
-  Stage-1 depends on stages: Stage-0
-  Stage-0 depends on stages: Stage-1
+  Stage-6 is a root stage
+  Stage-12 depends on stages: Stage-6 , consists of Stage-9, Stage-8, Stage-10
+  Stage-9
+  Stage-0 depends on stages: Stage-9, Stage-8, Stage-11
   Stage-2 depends on stages: Stage-0
-  null depends on stages: Stage-1
+  Stage-1 depends on stages: Stage-2
   Stage-3 depends on stages: Stage-1
-  Stage-5 depends on stages: Stage-0
-  Stage-6
+  Stage-4 depends on stages: Stage-2
+  Stage-5 depends on stages: Stage-2
+  Stage-7 depends on stages: Stage-0
   Stage-8
-  Stage-9 depends on stages: Stage-8
+  Stage-10
+  Stage-11 depends on stages: Stage-10
 
 STAGE PLANS:
-  Stage: Stage-4
+  Stage: Stage-6
     Map Reduce
       Map Operator Tree:
           TableScan
@@ -89,10 +89,10 @@ STAGE PLANS:
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: default.temp
 
-  Stage: Stage-10
+  Stage: Stage-12
     Conditional Operator
 
-  Stage: Stage-7
+  Stage: Stage-9
     Move Operator
       files:
           hdfs directory: true
@@ -108,7 +108,7 @@ STAGE PLANS:
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.temp
 
-  Stage: Stage-1
+  Stage: Stage-2
     Map Reduce
       Map Operator Tree:
           TableScan
@@ -143,7 +143,7 @@ STAGE PLANS:
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.default__temp_temp_index__
 
-  Stage: Stage-0
+  Stage: Stage-1
     Move Operator
       tables:
           replace: true
@@ -153,17 +153,17 @@ STAGE PLANS:
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.default__temp_temp_index__
 
-  Stage: Stage-2
+  Stage: Stage-3
     Stats-Aggr Operator
 
-  Stage: null
-
-  Stage: Stage-3
+  Stage: Stage-4
 
   Stage: Stage-5
+
+  Stage: Stage-7
     Stats-Aggr Operator
 
-  Stage: Stage-6
+  Stage: Stage-8
     Map Reduce
       Map Operator Tree:
           TableScan
@@ -175,7 +175,7 @@ STAGE PLANS:
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.temp
 
-  Stage: Stage-8
+  Stage: Stage-10
     Map Reduce
       Map Operator Tree:
           TableScan
@@ -187,7 +187,7 @@ STAGE PLANS:
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                   name: default.temp
 
-  Stage: Stage-9
+  Stage: Stage-11
     Move Operator
       files:
           hdfs directory: true

Modified: hive/branches/branch-0.13/ql/src/test/results/clientpositive/infer_bucket_sort_convert_join.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/test/results/clientpositive/infer_bucket_sort_convert_join.q.out?rev=1575574&r1=1575573&r2=1575574&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/test/results/clientpositive/infer_bucket_sort_convert_join.q.out (original)
+++ hive/branches/branch-0.13/ql/src/test/results/clientpositive/infer_bucket_sort_convert_join.q.out Sat Mar  8 17:23:33 2014
@@ -81,7 +81,7 @@ Obtaining error information
 
 Task failed!
 Task ID:
-  Stage-5
+  Stage-4
 
 Logs:
 

Modified: hive/branches/branch-0.13/ql/src/test/results/clientpositive/join35.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/test/results/clientpositive/join35.q.out?rev=1575574&r1=1575573&r2=1575574&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/test/results/clientpositive/join35.q.out (original)
+++ hive/branches/branch-0.13/ql/src/test/results/clientpositive/join35.q.out Sat Mar  8 17:23:33 2014
@@ -143,13 +143,9 @@ TOK_QUERY
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
-  Stage-8 depends on stages: Stage-1, Stage-4 , consists of Stage-6, Stage-10, Stage-2
-  Stage-6 has a backup stage: Stage-2
-  Stage-0 depends on stages: Stage-2, Stage-6, Stage-7
+  Stage-6 depends on stages: Stage-1, Stage-4
+  Stage-0 depends on stages: Stage-6
   Stage-3 depends on stages: Stage-0
-  Stage-10 has a backup stage: Stage-2
-  Stage-7 depends on stages: Stage-10
-  Stage-2
   Stage-4 is a root stage
 
 STAGE PLANS:
@@ -258,15 +254,13 @@ STAGE PLANS:
               GatherStats: false
               MultiFileSpray: false
 
-  Stage: Stage-8
-    Conditional Operator
-
   Stage: Stage-6
     Map Reduce
       Map Operator Tree:
           TableScan
             GatherStats: false
             Union
+              Statistics: Num rows: 18 Data size: 1802 Basic stats: COMPLETE Column stats: NONE
               Map Join Operator
                 condition map:
                      Inner Join 0 to 1
@@ -278,14 +272,17 @@ STAGE PLANS:
                   1 key (type: string)
                 outputColumnNames: _col1, _col2, _col3
                 Position of Big Table: 0
+                Statistics: Num rows: 19 Data size: 1982 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
                   expressions: _col2 (type: string), _col3 (type: string), UDFToInteger(_col1) (type: int)
                   outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 19 Data size: 1982 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
                     compressed: false
                     GlobalTableId: 1
 #### A masked pattern was here ####
                     NumFilesPerFileSink: 1
+                    Statistics: Num rows: 19 Data size: 1982 Basic stats: COMPLETE Column stats: NONE
 #### A masked pattern was here ####
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
@@ -308,6 +305,7 @@ STAGE PLANS:
           TableScan
             GatherStats: false
             Union
+              Statistics: Num rows: 18 Data size: 1802 Basic stats: COMPLETE Column stats: NONE
               Map Join Operator
                 condition map:
                      Inner Join 0 to 1
@@ -319,14 +317,17 @@ STAGE PLANS:
                   1 key (type: string)
                 outputColumnNames: _col1, _col2, _col3
                 Position of Big Table: 0
+                Statistics: Num rows: 19 Data size: 1982 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
                   expressions: _col2 (type: string), _col3 (type: string), UDFToInteger(_col1) (type: int)
                   outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 19 Data size: 1982 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
                     compressed: false
                     GlobalTableId: 1
 #### A masked pattern was here ####
                     NumFilesPerFileSink: 1
+                    Statistics: Num rows: 19 Data size: 1982 Basic stats: COMPLETE Column stats: NONE
 #### A masked pattern was here ####
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
@@ -356,6 +357,7 @@ STAGE PLANS:
             x 
               TableScan
                 alias: x
+                Statistics: Num rows: 1 Data size: 216 Basic stats: COMPLETE Column stats: NONE
                 GatherStats: false
       Path -> Alias:
 #### A masked pattern was here ####
@@ -470,339 +472,6 @@ STAGE PLANS:
     Stats-Aggr Operator
 #### A masked pattern was here ####
 
-  Stage: Stage-10
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-#### A masked pattern was here ####
-          Fetch Operator
-            limit: -1
-#### A masked pattern was here ####
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-#### A masked pattern was here ####
-          TableScan
-            GatherStats: false
-            Union
-              HashTable Sink Operator
-                condition expressions:
-                  0 {_col1}
-                  1 {key} {value}
-                keys:
-                  0 _col0 (type: string)
-                  1 key (type: string)
-                Position of Big Table: 1
-#### A masked pattern was here ####
-          TableScan
-            GatherStats: false
-            Union
-              HashTable Sink Operator
-                condition expressions:
-                  0 {_col1}
-                  1 {key} {value}
-                keys:
-                  0 _col0 (type: string)
-                  1 key (type: string)
-                Position of Big Table: 1
-
-  Stage: Stage-7
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: x
-            GatherStats: false
-            Map Join Operator
-              condition map:
-                   Inner Join 0 to 1
-              condition expressions:
-                0 {_col1}
-                1 {key} {value}
-              keys:
-                0 _col0 (type: string)
-                1 key (type: string)
-              outputColumnNames: _col1, _col2, _col3
-              Position of Big Table: 1
-              Select Operator
-                expressions: _col2 (type: string), _col3 (type: string), UDFToInteger(_col1) (type: int)
-                outputColumnNames: _col0, _col1, _col2
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 1
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        bucket_count -1
-                        columns key,value,val2
-                        columns.types string:string:int
-#### A masked pattern was here ####
-                        name default.dest_j1
-                        serialization.ddl struct dest_j1 { string key, string value, i32 val2}
-                        serialization.format 1
-                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.dest_j1
-                  TotalFiles: 1
-                  GatherStats: true
-                  MultiFileSpray: false
-      Local Work:
-        Map Reduce Local Work
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: -mr-10001
-            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-            properties:
-              columns _col0,_col1
-              columns.types string,bigint
-              escape.delim \
-              serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-            serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-          
-              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-              properties:
-                columns _col0,_col1
-                columns.types string,bigint
-                escape.delim \
-                serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-              serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-#### A masked pattern was here ####
-          Partition
-            base file name: -mr-10002
-            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-            properties:
-              columns _col0,_col1
-              columns.types string,bigint
-              escape.delim \
-              serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-            serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-          
-              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-              properties:
-                columns _col0,_col1
-                columns.types string,bigint
-                escape.delim \
-                serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-              serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-#### A masked pattern was here ####
-          Partition
-            base file name: src1
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src1
-              numFiles 1
-              numRows 0
-              rawDataSize 0
-              serialization.ddl struct src1 { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 216
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE true
-                bucket_count -1
-                columns key,value
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src1
-                numFiles 1
-                numRows 0
-                rawDataSize 0
-                serialization.ddl struct src1 { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 216
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src1
-            name: default.src1
-      Truncated Path -> Alias:
-        /src1 [x]
-
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            GatherStats: false
-            Union
-              Statistics: Num rows: 18 Data size: 1802 Basic stats: COMPLETE Column stats: NONE
-              Reduce Output Operator
-                key expressions: _col0 (type: string)
-                sort order: +
-                Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 18 Data size: 1802 Basic stats: COMPLETE Column stats: NONE
-                tag: 0
-                value expressions: _col1 (type: bigint)
-          TableScan
-            GatherStats: false
-            Union
-              Statistics: Num rows: 18 Data size: 1802 Basic stats: COMPLETE Column stats: NONE
-              Reduce Output Operator
-                key expressions: _col0 (type: string)
-                sort order: +
-                Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 18 Data size: 1802 Basic stats: COMPLETE Column stats: NONE
-                tag: 0
-                value expressions: _col1 (type: bigint)
-          TableScan
-            alias: x
-            Statistics: Num rows: 1 Data size: 216 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Reduce Output Operator
-              key expressions: key (type: string)
-              sort order: +
-              Map-reduce partition columns: key (type: string)
-              Statistics: Num rows: 1 Data size: 216 Basic stats: COMPLETE Column stats: NONE
-              tag: 1
-              value expressions: key (type: string), value (type: string)
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: -mr-10001
-            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-            properties:
-              columns _col0,_col1
-              columns.types string,bigint
-              escape.delim \
-              serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-            serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-          
-              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-              properties:
-                columns _col0,_col1
-                columns.types string,bigint
-                escape.delim \
-                serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-              serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-#### A masked pattern was here ####
-          Partition
-            base file name: -mr-10002
-            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-            properties:
-              columns _col0,_col1
-              columns.types string,bigint
-              escape.delim \
-              serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-            serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-          
-              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-              properties:
-                columns _col0,_col1
-                columns.types string,bigint
-                escape.delim \
-                serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-              serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-#### A masked pattern was here ####
-          Partition
-            base file name: src1
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src1
-              numFiles 1
-              numRows 0
-              rawDataSize 0
-              serialization.ddl struct src1 { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 216
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE true
-                bucket_count -1
-                columns key,value
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src1
-                numFiles 1
-                numRows 0
-                rawDataSize 0
-                serialization.ddl struct src1 { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 216
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src1
-            name: default.src1
-      Truncated Path -> Alias:
-        /src1 [x]
-#### A masked pattern was here ####
-      Needs Tagging: true
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Inner Join 0 to 1
-          condition expressions:
-            0 {VALUE._col1}
-            1 {VALUE._col0} {VALUE._col1}
-          outputColumnNames: _col1, _col2, _col3
-          Statistics: Num rows: 19 Data size: 1982 Basic stats: COMPLETE Column stats: NONE
-          Select Operator
-            expressions: _col2 (type: string), _col3 (type: string), UDFToInteger(_col1) (type: int)
-            outputColumnNames: _col0, _col1, _col2
-            Statistics: Num rows: 19 Data size: 1982 Basic stats: COMPLETE Column stats: NONE
-            File Output Operator
-              compressed: false
-              GlobalTableId: 1
-#### A masked pattern was here ####
-              NumFilesPerFileSink: 1
-              Statistics: Num rows: 19 Data size: 1982 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  properties:
-                    bucket_count -1
-                    columns key,value,val2
-                    columns.types string:string:int
-#### A masked pattern was here ####
-                    name default.dest_j1
-                    serialization.ddl struct dest_j1 { string key, string value, i32 val2}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.dest_j1
-              TotalFiles: 1
-              GatherStats: true
-              MultiFileSpray: false
-
   Stage: Stage-4
     Map Reduce
       Map Operator Tree:

Modified: hive/branches/branch-0.13/ql/src/test/results/clientpositive/mapjoin_hook.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/test/results/clientpositive/mapjoin_hook.q.out?rev=1575574&r1=1575573&r2=1575574&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/test/results/clientpositive/mapjoin_hook.q.out (original)
+++ hive/branches/branch-0.13/ql/src/test/results/clientpositive/mapjoin_hook.q.out Sat Mar  8 17:23:33 2014
@@ -62,7 +62,7 @@ Obtaining error information
 
 Task failed!
 Task ID:
-  Stage-6
+  Stage-7
 
 Logs:
 

Modified: hive/branches/branch-0.13/ql/src/test/results/clientpositive/mapjoin_test_outer.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/test/results/clientpositive/mapjoin_test_outer.q.out?rev=1575574&r1=1575573&r2=1575574&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/test/results/clientpositive/mapjoin_test_outer.q.out (original)
+++ hive/branches/branch-0.13/ql/src/test/results/clientpositive/mapjoin_test_outer.q.out Sat Mar  8 17:23:33 2014
@@ -1110,8 +1110,8 @@ STAGE PLANS:
     Map Reduce
       Map Operator Tree:
           TableScan
-            alias: src2
-            Statistics: Num rows: 1 Data size: 13 Basic stats: COMPLETE Column stats: NONE
+            alias: src3
+            Statistics: Num rows: 9 Data size: 40 Basic stats: COMPLETE Column stats: NONE
             Map Join Operator
               condition map:
                    Right Outer Join0 to 1
@@ -1141,7 +1141,7 @@ STAGE PLANS:
             src1 
               Fetch Operator
                 limit: -1
-            src3 
+            src2 
               Fetch Operator
                 limit: -1
           Alias -> Map Local Operator Tree:
@@ -1149,10 +1149,10 @@ STAGE PLANS:
               TableScan
                 alias: src1
                 Statistics: Num rows: 1 Data size: 216 Basic stats: COMPLETE Column stats: NONE
-            src3 
+            src2 
               TableScan
-                alias: src3
-                Statistics: Num rows: 9 Data size: 40 Basic stats: COMPLETE Column stats: NONE
+                alias: src2
+                Statistics: Num rows: 1 Data size: 13 Basic stats: COMPLETE Column stats: NONE
       Reduce Operator Tree:
         Extract
           Statistics: Num rows: 19 Data size: 88 Basic stats: COMPLETE Column stats: NONE

Modified: hive/branches/branch-0.13/ql/src/test/results/clientpositive/multiMapJoin2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/test/results/clientpositive/multiMapJoin2.q.out?rev=1575574&r1=1575573&r2=1575574&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/test/results/clientpositive/multiMapJoin2.q.out (original)
+++ hive/branches/branch-0.13/ql/src/test/results/clientpositive/multiMapJoin2.q.out Sat Mar  8 17:23:33 2014
@@ -2223,7 +2223,7 @@ STAGE PLANS:
     Map Reduce
       Map Operator Tree:
           TableScan
-            alias: a
+            alias: b
             Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
             Map Join Operator
               condition map:
@@ -2253,13 +2253,13 @@ STAGE PLANS:
       Local Work:
         Map Reduce Local Work
           Alias -> Map Local Tables:
-            null-subquery1:x-subquery1:tmp:b 
+            null-subquery1:x-subquery1:tmp:a 
               Fetch Operator
                 limit: -1
           Alias -> Map Local Operator Tree:
-            null-subquery1:x-subquery1:tmp:b 
+            null-subquery1:x-subquery1:tmp:a 
               TableScan
-                alias: b
+                alias: a
                 Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
       Reduce Operator Tree:
         Group By Operator

Modified: hive/branches/branch-0.13/ql/src/test/results/clientpositive/subquery_multiinsert.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.13/ql/src/test/results/clientpositive/subquery_multiinsert.q.out?rev=1575574&r1=1575573&r2=1575574&view=diff
==============================================================================
--- hive/branches/branch-0.13/ql/src/test/results/clientpositive/subquery_multiinsert.q.out (original)
+++ hive/branches/branch-0.13/ql/src/test/results/clientpositive/subquery_multiinsert.q.out Sat Mar  8 17:23:33 2014
@@ -486,3 +486,523 @@ POSTHOOK: Lineage: src_5.value EXPRESSIO
 199	val_199
 199	val_199
 2	val_2
+PREHOOK: query: explain
+from src b 
+INSERT OVERWRITE TABLE src_4 
+  select * 
+  where b.key in 
+   (select a.key 
+    from src a 
+    where b.value = a.value and a.key > '9'
+   ) 
+INSERT OVERWRITE TABLE src_5 
+  select *  
+  where b.key not in  ( select key from src s1 where s1.key > '2') 
+  order by key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+from src b 
+INSERT OVERWRITE TABLE src_4 
+  select * 
+  where b.key in 
+   (select a.key 
+    from src a 
+    where b.value = a.value and a.key > '9'
+   ) 
+INSERT OVERWRITE TABLE src_5 
+  select *  
+  where b.key not in  ( select key from src s1 where s1.key > '2') 
+  order by key
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: src_4.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_4.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+STAGE DEPENDENCIES:
+  Stage-10 is a root stage
+  Stage-13 depends on stages: Stage-10, Stage-14 , consists of Stage-12, Stage-4
+  Stage-12 has a backup stage: Stage-4
+  Stage-15 depends on stages: Stage-4, Stage-12
+  Stage-6 depends on stages: Stage-15
+  Stage-1 depends on stages: Stage-6
+  Stage-7 depends on stages: Stage-1
+  Stage-4
+  Stage-17 is a root stage
+  Stage-14 depends on stages: Stage-17
+  Stage-0 depends on stages: Stage-14
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-10
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: s1
+            Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: ((key > '2') and key is null) (type: boolean)
+              Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: count()
+                  mode: hash
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    sort order: 
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col0 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+          Filter Operator
+            predicate: (_col0 = 0) (type: boolean)
+            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Select Operator
+              expressions: _col0 (type: bigint)
+              outputColumnNames: _col0
+              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Group By Operator
+                keys: _col0 (type: bigint)
+                mode: hash
+                outputColumnNames: _col0
+                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-13
+    Conditional Operator
+
+  Stage: Stage-12
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Map Join Operator
+              condition map:
+                   Left Semi Join 0 to 1
+              condition expressions:
+                0 {key} {value}
+                1 
+              keys:
+                0 
+                1 
+              outputColumnNames: _col0, _col1
+              File Output Operator
+                compressed: false
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+      Local Work:
+        Map Reduce Local Work
+          Alias -> Map Local Tables:
+            $INTNAME1 
+              Fetch Operator
+                limit: -1
+          Alias -> Map Local Operator Tree:
+            $INTNAME1 
+              TableScan
+
+  Stage: Stage-15
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        sq_2:s1 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        sq_2:s1 
+          TableScan
+            alias: s1
+            Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (key > '2') (type: boolean)
+              Statistics: Num rows: 19 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 19 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
+                HashTable Sink Operator
+                  condition expressions:
+                    0 {_col0} {_col1}
+                    1 {_col0}
+                  keys:
+                    0 _col0 (type: string)
+                    1 _col0 (type: string)
+
+  Stage: Stage-6
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Map Join Operator
+              condition map:
+                   Left Outer Join0 to 1
+              condition expressions:
+                0 {_col0} {_col1}
+                1 {_col0}
+              keys:
+                0 _col0 (type: string)
+                1 _col0 (type: string)
+              outputColumnNames: _col0, _col1, _col4
+              Statistics: Num rows: 34 Data size: 7032 Basic stats: COMPLETE Column stats: NONE
+              Filter Operator
+                predicate: ((1 = 1) and _col4 is null) (type: boolean)
+                Statistics: Num rows: 8 Data size: 1654 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: string), _col1 (type: string)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 8 Data size: 1654 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string)
+                    sort order: +
+                    Statistics: Num rows: 8 Data size: 1654 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col0 (type: string), _col1 (type: string)
+      Local Work:
+        Map Reduce Local Work
+      Reduce Operator Tree:
+        Extract
+          Statistics: Num rows: 8 Data size: 1654 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 8 Data size: 1654 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                name: default.src_5
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src_5
+
+  Stage: Stage-7
+    Stats-Aggr Operator
+
+  Stage: Stage-4
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Reduce Output Operator
+              sort order: 
+              Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+              value expressions: key (type: string), value (type: string)
+          TableScan
+            Reduce Output Operator
+              sort order: 
+              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Left Semi Join 0 to 1
+          condition expressions:
+            0 {VALUE._col0} {VALUE._col1}
+            1 
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-17
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        sq_1:a 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        sq_1:a 
+          TableScan
+            alias: a
+            Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (key > '9') (type: boolean)
+              Statistics: Num rows: 9 Data size: 1803 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: string), value (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 9 Data size: 1803 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  keys: _col0 (type: string), _col1 (type: string)
+                  mode: hash
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 9 Data size: 1803 Basic stats: COMPLETE Column stats: NONE
+                  HashTable Sink Operator
+                    condition expressions:
+                      0 {key} {value}
+                      1 
+                    keys:
+                      0 key (type: string), value (type: string)
+                      1 _col0 (type: string), _col1 (type: string)
+
+  Stage: Stage-14
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: b
+            Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+            Map Join Operator
+              condition map:
+                   Left Semi Join 0 to 1
+              condition expressions:
+                0 {key} {value}
+                1 
+              keys:
+                0 key (type: string), value (type: string)
+                1 _col0 (type: string), _col1 (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
+              Filter Operator
+                predicate: (1 = 1) (type: boolean)
+                Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: string), _col1 (type: string)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.src_4
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src_4
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+PREHOOK: query: from src b 
+INSERT OVERWRITE TABLE src_4 
+  select * 
+  where b.key in 
+   (select a.key 
+    from src a 
+    where b.value = a.value and a.key > '9'
+   ) 
+INSERT OVERWRITE TABLE src_5 
+  select *  
+  where b.key not in  ( select key from src s1 where s1.key > '2') 
+  order by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@src_4
+PREHOOK: Output: default@src_5
+POSTHOOK: query: from src b 
+INSERT OVERWRITE TABLE src_4 
+  select * 
+  where b.key in 
+   (select a.key 
+    from src a 
+    where b.value = a.value and a.key > '9'
+   ) 
+INSERT OVERWRITE TABLE src_5 
+  select *  
+  where b.key not in  ( select key from src s1 where s1.key > '2') 
+  order by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@src_4
+POSTHOOK: Output: default@src_5
+POSTHOOK: Lineage: src_4.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_4.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_4.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_4.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: select * from src_4
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_4
+#### A masked pattern was here ####
+POSTHOOK: query: select * from src_4
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_4
+#### A masked pattern was here ####
+POSTHOOK: Lineage: src_4.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_4.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_4.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_4.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+98	val_98
+92	val_92
+96	val_96
+95	val_95
+98	val_98
+90	val_90
+95	val_95
+90	val_90
+97	val_97
+90	val_90
+97	val_97
+PREHOOK: query: select * from src_5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_5
+#### A masked pattern was here ####
+POSTHOOK: query: select * from src_5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_5
+#### A masked pattern was here ####
+POSTHOOK: Lineage: src_4.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_4.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_4.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_4.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+0	val_0
+0	val_0
+0	val_0
+10	val_10
+100	val_100
+100	val_100
+103	val_103
+103	val_103
+104	val_104
+104	val_104
+105	val_105
+11	val_11
+111	val_111
+113	val_113
+113	val_113
+114	val_114
+116	val_116
+118	val_118
+118	val_118
+119	val_119
+119	val_119
+119	val_119
+12	val_12
+12	val_12
+120	val_120
+120	val_120
+125	val_125
+125	val_125
+126	val_126
+128	val_128
+128	val_128
+128	val_128
+129	val_129
+129	val_129
+131	val_131
+133	val_133
+134	val_134
+134	val_134
+136	val_136
+137	val_137
+137	val_137
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+143	val_143
+145	val_145
+146	val_146
+146	val_146
+149	val_149
+149	val_149
+15	val_15
+15	val_15
+150	val_150
+152	val_152
+152	val_152
+153	val_153
+155	val_155
+156	val_156
+157	val_157
+158	val_158
+160	val_160
+162	val_162
+163	val_163
+164	val_164
+164	val_164
+165	val_165
+165	val_165
+166	val_166
+167	val_167
+167	val_167
+167	val_167
+168	val_168
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+17	val_17
+170	val_170
+172	val_172
+172	val_172
+174	val_174
+174	val_174
+175	val_175
+175	val_175
+176	val_176
+176	val_176
+177	val_177
+178	val_178
+179	val_179
+179	val_179
+18	val_18
+18	val_18
+180	val_180
+181	val_181
+183	val_183
+186	val_186
+187	val_187
+187	val_187
+187	val_187
+189	val_189
+19	val_19
+190	val_190
+191	val_191
+191	val_191
+192	val_192
+193	val_193
+193	val_193
+193	val_193
+194	val_194
+195	val_195
+195	val_195
+196	val_196
+197	val_197
+197	val_197
+199	val_199
+199	val_199
+199	val_199
+2	val_2