Posted to commits@hive.apache.org by vi...@apache.org on 2014/01/30 21:46:19 UTC

svn commit: r1562955 [3/14] - in /hive/trunk: common/src/java/org/apache/hadoop/hive/conf/ conf/ itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ ql/sr...

Modified: hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out?rev=1562955&r1=1562954&r2=1562955&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out Thu Jan 30 20:46:16 2014
@@ -475,11 +475,9 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME bucket_big) a) (TOK_TABREF (TOK_TABNAME bucket_small) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTIONSTAR count)))))
 
 STAGE DEPENDENCIES:
-  Stage-5 is a root stage , consists of Stage-6, Stage-7, Stage-1
-  Stage-6 has a backup stage: Stage-1
-  Stage-3 depends on stages: Stage-6
-  Stage-7 has a backup stage: Stage-1
-  Stage-4 depends on stages: Stage-7
+  Stage-5 is a root stage , consists of Stage-3, Stage-4, Stage-1
+  Stage-3 has a backup stage: Stage-1
+  Stage-4 has a backup stage: Stage-1
   Stage-1
   Stage-0 is a root stage
 
@@ -487,70 +485,6 @@ STAGE PLANS:
   Stage: Stage-5
     Conditional Operator
 
-  Stage: Stage-6
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        b 
-          Fetch Operator
-            limit: -1
-            Partition Description:
-                Partition
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count 2
-                    bucket_field_name key
-                    columns key,value
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.bucket_small
-                    numFiles 2
-                    numRows 0
-                    partition_columns ds
-                    rawDataSize 0
-                    serialization.ddl struct bucket_small { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 114
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      SORTBUCKETCOLSPREFIX TRUE
-                      bucket_count 2
-                      bucket_field_name key
-                      columns key,value
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.bucket_small
-                      partition_columns ds
-                      serialization.ddl struct bucket_small { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.bucket_small
-                  name: default.bucket_small
-      Alias -> Map Local Operator Tree:
-        b 
-          TableScan
-            alias: b
-            GatherStats: false
-            HashTable Sink Operator
-              condition expressions:
-                0 
-                1 
-              handleSkewJoin: false
-              keys:
-                0 [Column[key]]
-                1 [Column[key]]
-              Position of Big Table: 0
-
   Stage: Stage-3
     Map Reduce
       Alias -> Map Operator Tree:
@@ -584,6 +518,58 @@ STAGE PLANS:
                           type: bigint
       Local Work:
         Map Reduce Local Work
+          Alias -> Map Local Tables:
+            b 
+              Fetch Operator
+                limit: -1
+                Partition Description:
+                    Partition
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      partition values:
+                        ds 2008-04-08
+                      properties:
+                        COLUMN_STATS_ACCURATE true
+                        bucket_count 2
+                        bucket_field_name key
+                        columns key,value
+                        columns.types string:string
+#### A masked pattern was here ####
+                        name default.bucket_small
+                        numFiles 2
+                        numRows 0
+                        partition_columns ds
+                        rawDataSize 0
+                        serialization.ddl struct bucket_small { string key, string value}
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 114
+#### A masked pattern was here ####
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        properties:
+                          SORTBUCKETCOLSPREFIX TRUE
+                          bucket_count 2
+                          bucket_field_name key
+                          columns key,value
+                          columns.types string:string
+#### A masked pattern was here ####
+                          name default.bucket_small
+                          partition_columns ds
+                          serialization.ddl struct bucket_small { string key, string value}
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.bucket_small
+                      name: default.bucket_small
+          Alias -> Map Local Operator Tree:
+            b 
+              TableScan
+                alias: b
+                GatherStats: false
       Path -> Alias:
 #### A masked pattern was here ####
       Path -> Partition:
@@ -755,114 +741,6 @@ STAGE PLANS:
               GatherStats: false
               MultiFileSpray: false
 
-  Stage: Stage-7
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        a 
-          Fetch Operator
-            limit: -1
-            Partition Description:
-                Partition
-                  base file name: ds=2008-04-08
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count 4
-                    bucket_field_name key
-                    columns key,value
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.bucket_big
-                    numFiles 4
-                    numRows 0
-                    partition_columns ds
-                    rawDataSize 0
-                    serialization.ddl struct bucket_big { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      SORTBUCKETCOLSPREFIX TRUE
-                      bucket_count 4
-                      bucket_field_name key
-                      columns key,value
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.bucket_big
-                      partition_columns ds
-                      serialization.ddl struct bucket_big { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.bucket_big
-                  name: default.bucket_big
-                Partition
-                  base file name: ds=2008-04-09
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-09
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count 4
-                    bucket_field_name key
-                    columns key,value
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.bucket_big
-                    numFiles 4
-                    numRows 0
-                    partition_columns ds
-                    rawDataSize 0
-                    serialization.ddl struct bucket_big { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      SORTBUCKETCOLSPREFIX TRUE
-                      bucket_count 4
-                      bucket_field_name key
-                      columns key,value
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.bucket_big
-                      partition_columns ds
-                      serialization.ddl struct bucket_big { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.bucket_big
-                  name: default.bucket_big
-      Alias -> Map Local Operator Tree:
-        a 
-          TableScan
-            alias: a
-            GatherStats: false
-            HashTable Sink Operator
-              condition expressions:
-                0 
-                1 
-              handleSkewJoin: false
-              keys:
-                0 [Column[key]]
-                1 [Column[key]]
-              Position of Big Table: 1
-
   Stage: Stage-4
     Map Reduce
       Alias -> Map Operator Tree:
@@ -896,6 +774,102 @@ STAGE PLANS:
                           type: bigint
       Local Work:
         Map Reduce Local Work
+          Alias -> Map Local Tables:
+            a 
+              Fetch Operator
+                limit: -1
+                Partition Description:
+                    Partition
+                      base file name: ds=2008-04-08
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      partition values:
+                        ds 2008-04-08
+                      properties:
+                        COLUMN_STATS_ACCURATE true
+                        bucket_count 4
+                        bucket_field_name key
+                        columns key,value
+                        columns.types string:string
+#### A masked pattern was here ####
+                        name default.bucket_big
+                        numFiles 4
+                        numRows 0
+                        partition_columns ds
+                        rawDataSize 0
+                        serialization.ddl struct bucket_big { string key, string value}
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 5812
+#### A masked pattern was here ####
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        properties:
+                          SORTBUCKETCOLSPREFIX TRUE
+                          bucket_count 4
+                          bucket_field_name key
+                          columns key,value
+                          columns.types string:string
+#### A masked pattern was here ####
+                          name default.bucket_big
+                          partition_columns ds
+                          serialization.ddl struct bucket_big { string key, string value}
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.bucket_big
+                      name: default.bucket_big
+                    Partition
+                      base file name: ds=2008-04-09
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      partition values:
+                        ds 2008-04-09
+                      properties:
+                        COLUMN_STATS_ACCURATE true
+                        bucket_count 4
+                        bucket_field_name key
+                        columns key,value
+                        columns.types string:string
+#### A masked pattern was here ####
+                        name default.bucket_big
+                        numFiles 4
+                        numRows 0
+                        partition_columns ds
+                        rawDataSize 0
+                        serialization.ddl struct bucket_big { string key, string value}
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 5812
+#### A masked pattern was here ####
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        properties:
+                          SORTBUCKETCOLSPREFIX TRUE
+                          bucket_count 4
+                          bucket_field_name key
+                          columns key,value
+                          columns.types string:string
+#### A masked pattern was here ####
+                          name default.bucket_big
+                          partition_columns ds
+                          serialization.ddl struct bucket_big { string key, string value}
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.bucket_big
+                      name: default.bucket_big
+          Alias -> Map Local Operator Tree:
+            a 
+              TableScan
+                alias: a
+                GatherStats: false
       Path -> Alias:
 #### A masked pattern was here ####
       Path -> Partition:
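
For readers following the plan hunks above without the test script at hand, the query under test can be rebuilt from the TOK_QUERY tree printed at the top of auto_sortmerge_join_1.q.out. The bucketed-table DDL and session settings live in the .q file rather than in this diff, so the set statement below is only an assumption about which switch the test exercises, not a copy of the script:

  -- Sketch rebuilt from the AST above; the set line is an assumption, not taken from the diff.
  set hive.auto.convert.sortmerge.join=true;
  select count(*)
  from bucket_big a
  join bucket_small b on a.key = b.key;

The hunks themselves show the standalone local-work stages (Stage-6 and Stage-7) dropping out of STAGE DEPENDENCIES, with their Map Reduce Local Work content now emitted under the Local Work block of the map-reduce stages that consume it.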

Modified: hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out?rev=1562955&r1=1562954&r2=1562955&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out Thu Jan 30 20:46:16 2014
@@ -83,77 +83,10 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME bucket_small) a) (TOK_TABREF (TOK_TABNAME bucket_big) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTIONSTAR count)))))
 
 STAGE DEPENDENCIES:
-  Stage-5 is a root stage
-  Stage-2 depends on stages: Stage-5
+  Stage-2 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-5
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        a 
-          Fetch Operator
-            limit: -1
-            Partition Description:
-                Partition
-                  base file name: ds=2008-04-08
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count 2
-                    bucket_field_name key
-                    columns key,value
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.bucket_small
-                    numFiles 2
-                    numRows 0
-                    partition_columns ds
-                    rawDataSize 0
-                    serialization.ddl struct bucket_small { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 114
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count 2
-                      bucket_field_name key
-                      columns key,value
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.bucket_small
-                      partition_columns ds
-                      serialization.ddl struct bucket_small { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.bucket_small
-                  name: default.bucket_small
-      Alias -> Map Local Operator Tree:
-        a 
-          TableScan
-            alias: a
-            Statistics:
-                numRows: 1 dataSize: 114 basicStatsState: COMPLETE colStatsState: NONE
-            GatherStats: false
-            HashTable Sink Operator
-              condition expressions:
-                0 
-                1 
-              handleSkewJoin: false
-              keys:
-                0 [Column[key]]
-                1 [Column[key]]
-              Position of Big Table: 1
-
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
@@ -197,6 +130,60 @@ STAGE PLANS:
                           type: bigint
       Local Work:
         Map Reduce Local Work
+          Alias -> Map Local Tables:
+            a 
+              Fetch Operator
+                limit: -1
+                Partition Description:
+                    Partition
+                      base file name: ds=2008-04-08
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      partition values:
+                        ds 2008-04-08
+                      properties:
+                        COLUMN_STATS_ACCURATE true
+                        bucket_count 2
+                        bucket_field_name key
+                        columns key,value
+                        columns.types string:string
+#### A masked pattern was here ####
+                        name default.bucket_small
+                        numFiles 2
+                        numRows 0
+                        partition_columns ds
+                        rawDataSize 0
+                        serialization.ddl struct bucket_small { string key, string value}
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 114
+#### A masked pattern was here ####
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        properties:
+                          bucket_count 2
+                          bucket_field_name key
+                          columns key,value
+                          columns.types string:string
+#### A masked pattern was here ####
+                          name default.bucket_small
+                          partition_columns ds
+                          serialization.ddl struct bucket_small { string key, string value}
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.bucket_small
+                      name: default.bucket_small
+          Alias -> Map Local Operator Tree:
+            a 
+              TableScan
+                alias: a
+                Statistics:
+                    numRows: 1 dataSize: 114 basicStatsState: COMPLETE colStatsState: NONE
+                GatherStats: false
       Path -> Alias:
 #### A masked pattern was here ####
       Path -> Partition:
@@ -409,77 +396,10 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME bucket_small) a) (TOK_TABREF (TOK_TABNAME bucket_big) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTIONSTAR count)))))
 
 STAGE DEPENDENCIES:
-  Stage-5 is a root stage
-  Stage-2 depends on stages: Stage-5
+  Stage-2 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-5
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        a 
-          Fetch Operator
-            limit: -1
-            Partition Description:
-                Partition
-                  base file name: ds=2008-04-08
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count 2
-                    bucket_field_name key
-                    columns key,value
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.bucket_small
-                    numFiles 2
-                    numRows 0
-                    partition_columns ds
-                    rawDataSize 0
-                    serialization.ddl struct bucket_small { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 114
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count 2
-                      bucket_field_name key
-                      columns key,value
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.bucket_small
-                      partition_columns ds
-                      serialization.ddl struct bucket_small { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.bucket_small
-                  name: default.bucket_small
-      Alias -> Map Local Operator Tree:
-        a 
-          TableScan
-            alias: a
-            Statistics:
-                numRows: 1 dataSize: 114 basicStatsState: COMPLETE colStatsState: NONE
-            GatherStats: false
-            HashTable Sink Operator
-              condition expressions:
-                0 
-                1 
-              handleSkewJoin: false
-              keys:
-                0 [Column[key]]
-                1 [Column[key]]
-              Position of Big Table: 1
-
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
@@ -523,6 +443,60 @@ STAGE PLANS:
                           type: bigint
       Local Work:
         Map Reduce Local Work
+          Alias -> Map Local Tables:
+            a 
+              Fetch Operator
+                limit: -1
+                Partition Description:
+                    Partition
+                      base file name: ds=2008-04-08
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      partition values:
+                        ds 2008-04-08
+                      properties:
+                        COLUMN_STATS_ACCURATE true
+                        bucket_count 2
+                        bucket_field_name key
+                        columns key,value
+                        columns.types string:string
+#### A masked pattern was here ####
+                        name default.bucket_small
+                        numFiles 2
+                        numRows 0
+                        partition_columns ds
+                        rawDataSize 0
+                        serialization.ddl struct bucket_small { string key, string value}
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 114
+#### A masked pattern was here ####
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        properties:
+                          bucket_count 2
+                          bucket_field_name key
+                          columns key,value
+                          columns.types string:string
+#### A masked pattern was here ####
+                          name default.bucket_small
+                          partition_columns ds
+                          serialization.ddl struct bucket_small { string key, string value}
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.bucket_small
+                      name: default.bucket_small
+          Alias -> Map Local Operator Tree:
+            a 
+              TableScan
+                alias: a
+                Statistics:
+                    numRows: 1 dataSize: 114 basicStatsState: COMPLETE colStatsState: NONE
+                GatherStats: false
       Path -> Alias:
 #### A masked pattern was here ####
       Path -> Partition:
@@ -729,83 +703,10 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME bucket_small) a) (TOK_TABREF (TOK_TABNAME bucket_big) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a))) (TOK_SELEXPR (TOK_FUNCTIONSTAR count)))))
 
 STAGE DEPENDENCIES:
-  Stage-3 is a root stage
-  Stage-1 depends on stages: Stage-3
+  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-3
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        a 
-          Fetch Operator
-            limit: -1
-            Partition Description:
-                Partition
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count 2
-                    bucket_field_name key
-                    columns key,value
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.bucket_small
-                    numFiles 2
-                    numRows 0
-                    partition_columns ds
-                    rawDataSize 0
-                    serialization.ddl struct bucket_small { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 114
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count 2
-                      bucket_field_name key
-                      columns key,value
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.bucket_small
-                      partition_columns ds
-                      serialization.ddl struct bucket_small { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.bucket_small
-                  name: default.bucket_small
-      Alias -> Map Local Operator Tree:
-        a 
-          TableScan
-            alias: a
-            Statistics:
-                numRows: 1 dataSize: 114 basicStatsState: COMPLETE colStatsState: NONE
-            GatherStats: false
-            HashTable Sink Operator
-              condition expressions:
-                0 
-                1 
-              handleSkewJoin: false
-              keys:
-                0 [Column[key]]
-                1 [Column[key]]
-              Position of Big Table: 1
-      Bucket Mapjoin Context:
-          Alias Bucket Base File Name Mapping:
-            a {ds=2008-04-08/srcsortbucket1outof4.txt=[ds=2008-04-08/smallsrcsortbucket1outof4.txt], ds=2008-04-08/srcsortbucket2outof4.txt=[ds=2008-04-08/smallsrcsortbucket2outof4.txt], ds=2008-04-08/srcsortbucket3outof4.txt=[ds=2008-04-08/smallsrcsortbucket1outof4.txt], ds=2008-04-08/srcsortbucket4outof4.txt=[ds=2008-04-08/smallsrcsortbucket2outof4.txt], ds=2008-04-09/srcsortbucket1outof4.txt=[ds=2008-04-08/smallsrcsortbucket1outof4.txt], ds=2008-04-09/srcsortbucket2outof4.txt=[ds=2008-04-08/smallsrcsortbucket2outof4.txt], ds=2008-04-09/srcsortbucket3outof4.txt=[ds=2008-04-08/smallsrcsortbucket1outof4.txt], ds=2008-04-09/srcsortbucket4outof4.txt=[ds=2008-04-08/smallsrcsortbucket2outof4.txt]}
-          Alias Bucket File Name Mapping:
-#### A masked pattern was here ####
-          Alias Bucket Output File Name Mapping:
-#### A masked pattern was here ####
-
   Stage: Stage-1
     Map Reduce
       Alias -> Map Operator Tree:
@@ -850,6 +751,66 @@ STAGE PLANS:
                           type: bigint
       Local Work:
         Map Reduce Local Work
+          Alias -> Map Local Tables:
+            a 
+              Fetch Operator
+                limit: -1
+                Partition Description:
+                    Partition
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      partition values:
+                        ds 2008-04-08
+                      properties:
+                        COLUMN_STATS_ACCURATE true
+                        bucket_count 2
+                        bucket_field_name key
+                        columns key,value
+                        columns.types string:string
+#### A masked pattern was here ####
+                        name default.bucket_small
+                        numFiles 2
+                        numRows 0
+                        partition_columns ds
+                        rawDataSize 0
+                        serialization.ddl struct bucket_small { string key, string value}
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 114
+#### A masked pattern was here ####
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        properties:
+                          bucket_count 2
+                          bucket_field_name key
+                          columns key,value
+                          columns.types string:string
+#### A masked pattern was here ####
+                          name default.bucket_small
+                          partition_columns ds
+                          serialization.ddl struct bucket_small { string key, string value}
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.bucket_small
+                      name: default.bucket_small
+          Alias -> Map Local Operator Tree:
+            a 
+              TableScan
+                alias: a
+                Statistics:
+                    numRows: 1 dataSize: 114 basicStatsState: COMPLETE colStatsState: NONE
+                GatherStats: false
+          Bucket Mapjoin Context:
+              Alias Bucket Base File Name Mapping:
+                a {ds=2008-04-08/srcsortbucket1outof4.txt=[ds=2008-04-08/smallsrcsortbucket1outof4.txt], ds=2008-04-08/srcsortbucket2outof4.txt=[ds=2008-04-08/smallsrcsortbucket2outof4.txt], ds=2008-04-08/srcsortbucket3outof4.txt=[ds=2008-04-08/smallsrcsortbucket1outof4.txt], ds=2008-04-08/srcsortbucket4outof4.txt=[ds=2008-04-08/smallsrcsortbucket2outof4.txt], ds=2008-04-09/srcsortbucket1outof4.txt=[ds=2008-04-08/smallsrcsortbucket1outof4.txt], ds=2008-04-09/srcsortbucket2outof4.txt=[ds=2008-04-08/smallsrcsortbucket2outof4.txt], ds=2008-04-09/srcsortbucket3outof4.txt=[ds=2008-04-08/smallsrcsortbucket1outof4.txt], ds=2008-04-09/srcsortbucket4outof4.txt=[ds=2008-04-08/smallsrcsortbucket2outof4.txt]}
+              Alias Bucket File Name Mapping:
+#### A masked pattern was here ####
+              Alias Bucket Output File Name Mapping:
+#### A masked pattern was here ####
       Path -> Alias:
 #### A masked pattern was here ####
       Path -> Partition:
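
The last hunk above belongs to the hinted variant in auto_sortmerge_join_11.q.out; its AST carries a TOK_MAPJOIN hint on alias a, while the earlier hunks in this file cover the same join without the hint. A sketch of the hinted query, rebuilt from that AST (table setup comes from the .q file and is not shown here):

  -- Rebuilt from the hinted TOK_QUERY tree above; everything outside the query text is assumed.
  select /*+ MAPJOIN(a) */ count(*)
  from bucket_small a
  join bucket_big b on a.key = b.key;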

Modified: hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out?rev=1562955&r1=1562954&r2=1562955&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out Thu Jan 30 20:46:16 2014
@@ -111,206 +111,10 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_JOIN (TOK_TABREF (TOK_TABNAME bucket_small) a) (TOK_TABREF (TOK_TABNAME bucket_medium) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF (TOK_TABNAME bucket_big) c) (= (. (TOK_TABLE_OR_COL c) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF (TOK_TABNAME bucket_medium) d) (= (. (TOK_TABLE_OR_COL c) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTIONSTAR count)))))
 
 STAGE DEPENDENCIES:
-  Stage-8 is a root stage
-  Stage-2 depends on stages: Stage-8
+  Stage-2 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-8
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        a 
-          Fetch Operator
-            limit: -1
-            Partition Description:
-                Partition
-                  base file name: ds=2008-04-08
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count 2
-                    bucket_field_name key
-                    columns key,value
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.bucket_small
-                    numFiles 2
-                    numRows 0
-                    partition_columns ds
-                    rawDataSize 0
-                    serialization.ddl struct bucket_small { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 114
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      SORTBUCKETCOLSPREFIX TRUE
-                      bucket_count 2
-                      bucket_field_name key
-                      columns key,value
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.bucket_small
-                      partition_columns ds
-                      serialization.ddl struct bucket_small { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.bucket_small
-                  name: default.bucket_small
-        b 
-          Fetch Operator
-            limit: -1
-            Partition Description:
-                Partition
-                  base file name: ds=2008-04-08
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count 3
-                    bucket_field_name key
-                    columns key,value
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.bucket_medium
-                    numFiles 3
-                    numRows 0
-                    partition_columns ds
-                    rawDataSize 0
-                    serialization.ddl struct bucket_medium { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 170
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      SORTBUCKETCOLSPREFIX TRUE
-                      bucket_count 3
-                      bucket_field_name key
-                      columns key,value
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.bucket_medium
-                      partition_columns ds
-                      serialization.ddl struct bucket_medium { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.bucket_medium
-                  name: default.bucket_medium
-        d 
-          Fetch Operator
-            limit: -1
-            Partition Description:
-                Partition
-                  base file name: ds=2008-04-08
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count 3
-                    bucket_field_name key
-                    columns key,value
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.bucket_medium
-                    numFiles 3
-                    numRows 0
-                    partition_columns ds
-                    rawDataSize 0
-                    serialization.ddl struct bucket_medium { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 170
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      SORTBUCKETCOLSPREFIX TRUE
-                      bucket_count 3
-                      bucket_field_name key
-                      columns key,value
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.bucket_medium
-                      partition_columns ds
-                      serialization.ddl struct bucket_medium { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.bucket_medium
-                  name: default.bucket_medium
-      Alias -> Map Local Operator Tree:
-        a 
-          TableScan
-            alias: a
-            Statistics:
-                numRows: 1 dataSize: 114 basicStatsState: COMPLETE colStatsState: NONE
-            GatherStats: false
-            HashTable Sink Operator
-              condition expressions:
-                0 
-                1 
-                2 
-              handleSkewJoin: false
-              keys:
-                0 [Column[key]]
-                1 [Column[key]]
-                2 [Column[key]]
-              Position of Big Table: 2
-        b 
-          TableScan
-            alias: b
-            Statistics:
-                numRows: 1 dataSize: 170 basicStatsState: COMPLETE colStatsState: NONE
-            GatherStats: false
-            HashTable Sink Operator
-              condition expressions:
-                0 
-                1 
-                2 
-              handleSkewJoin: false
-              keys:
-                0 [Column[key]]
-                1 [Column[key]]
-                2 [Column[key]]
-              Position of Big Table: 2
-        d 
-          TableScan
-            alias: d
-            Statistics:
-                numRows: 0 dataSize: 170 basicStatsState: PARTIAL colStatsState: COMPLETE
-            GatherStats: false
-            HashTable Sink Operator
-              condition expressions:
-                0 
-                1 
-              handleSkewJoin: false
-              keys:
-                0 []
-                1 []
-              Position of Big Table: 0
-
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
@@ -370,6 +174,167 @@ STAGE PLANS:
                             type: bigint
       Local Work:
         Map Reduce Local Work
+          Alias -> Map Local Tables:
+            a 
+              Fetch Operator
+                limit: -1
+                Partition Description:
+                    Partition
+                      base file name: ds=2008-04-08
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      partition values:
+                        ds 2008-04-08
+                      properties:
+                        COLUMN_STATS_ACCURATE true
+                        bucket_count 2
+                        bucket_field_name key
+                        columns key,value
+                        columns.types string:string
+#### A masked pattern was here ####
+                        name default.bucket_small
+                        numFiles 2
+                        numRows 0
+                        partition_columns ds
+                        rawDataSize 0
+                        serialization.ddl struct bucket_small { string key, string value}
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 114
+#### A masked pattern was here ####
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        properties:
+                          SORTBUCKETCOLSPREFIX TRUE
+                          bucket_count 2
+                          bucket_field_name key
+                          columns key,value
+                          columns.types string:string
+#### A masked pattern was here ####
+                          name default.bucket_small
+                          partition_columns ds
+                          serialization.ddl struct bucket_small { string key, string value}
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.bucket_small
+                      name: default.bucket_small
+            b 
+              Fetch Operator
+                limit: -1
+                Partition Description:
+                    Partition
+                      base file name: ds=2008-04-08
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      partition values:
+                        ds 2008-04-08
+                      properties:
+                        COLUMN_STATS_ACCURATE true
+                        bucket_count 3
+                        bucket_field_name key
+                        columns key,value
+                        columns.types string:string
+#### A masked pattern was here ####
+                        name default.bucket_medium
+                        numFiles 3
+                        numRows 0
+                        partition_columns ds
+                        rawDataSize 0
+                        serialization.ddl struct bucket_medium { string key, string value}
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 170
+#### A masked pattern was here ####
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        properties:
+                          SORTBUCKETCOLSPREFIX TRUE
+                          bucket_count 3
+                          bucket_field_name key
+                          columns key,value
+                          columns.types string:string
+#### A masked pattern was here ####
+                          name default.bucket_medium
+                          partition_columns ds
+                          serialization.ddl struct bucket_medium { string key, string value}
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.bucket_medium
+                      name: default.bucket_medium
+            d 
+              Fetch Operator
+                limit: -1
+                Partition Description:
+                    Partition
+                      base file name: ds=2008-04-08
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      partition values:
+                        ds 2008-04-08
+                      properties:
+                        COLUMN_STATS_ACCURATE true
+                        bucket_count 3
+                        bucket_field_name key
+                        columns key,value
+                        columns.types string:string
+#### A masked pattern was here ####
+                        name default.bucket_medium
+                        numFiles 3
+                        numRows 0
+                        partition_columns ds
+                        rawDataSize 0
+                        serialization.ddl struct bucket_medium { string key, string value}
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 170
+#### A masked pattern was here ####
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        properties:
+                          SORTBUCKETCOLSPREFIX TRUE
+                          bucket_count 3
+                          bucket_field_name key
+                          columns key,value
+                          columns.types string:string
+#### A masked pattern was here ####
+                          name default.bucket_medium
+                          partition_columns ds
+                          serialization.ddl struct bucket_medium { string key, string value}
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.bucket_medium
+                      name: default.bucket_medium
+          Alias -> Map Local Operator Tree:
+            a 
+              TableScan
+                alias: a
+                Statistics:
+                    numRows: 1 dataSize: 114 basicStatsState: COMPLETE colStatsState: NONE
+                GatherStats: false
+            b 
+              TableScan
+                alias: b
+                Statistics:
+                    numRows: 1 dataSize: 170 basicStatsState: COMPLETE colStatsState: NONE
+                GatherStats: false
+            d 
+              TableScan
+                alias: d
+                Statistics:
+                    numRows: 0 dataSize: 170 basicStatsState: PARTIAL colStatsState: COMPLETE
+                GatherStats: false
       Path -> Alias:
 #### A masked pattern was here ####
       Path -> Partition:
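
auto_sortmerge_join_12 covers a multi-table case; rebuilding the query from its TOK_QUERY tree gives the four-way join below. The join order and predicates (including the repeated c.key = b.key condition on alias d) are copied verbatim from the AST; the DDL and session settings are assumed to live in the .q file:

  -- Sketch rebuilt from the AST at the top of auto_sortmerge_join_12.q.out.
  select count(*)
  from bucket_small a
  join bucket_medium b on a.key = b.key
  join bucket_big    c on c.key = b.key
  join bucket_medium d on c.key = b.key;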

Modified: hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_14.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_14.q.out?rev=1562955&r1=1562954&r2=1562955&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_14.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_14.q.out Thu Jan 30 20:46:16 2014
@@ -46,9 +46,8 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_TABREF (TOK_TABNAME tbl1) a) (TOK_TABREF (TOK_TABNAME tbl2) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTIONSTAR count)))))
 
 STAGE DEPENDENCIES:
-  Stage-4 is a root stage , consists of Stage-5, Stage-1
-  Stage-5 has a backup stage: Stage-1
-  Stage-3 depends on stages: Stage-5
+  Stage-4 is a root stage , consists of Stage-3, Stage-1
+  Stage-3 has a backup stage: Stage-1
   Stage-1
   Stage-0 is a root stage
 
@@ -56,26 +55,6 @@ STAGE PLANS:
   Stage: Stage-4
     Conditional Operator
 
-  Stage: Stage-5
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        b 
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-        b 
-          TableScan
-            alias: b
-            HashTable Sink Operator
-              condition expressions:
-                0 
-                1 
-              handleSkewJoin: false
-              keys:
-                0 [Column[key]]
-                1 [Column[key]]
-              Position of Big Table: 0
-
   Stage: Stage-3
     Map Reduce
       Alias -> Map Operator Tree:
@@ -108,6 +87,14 @@ STAGE PLANS:
                           type: bigint
       Local Work:
         Map Reduce Local Work
+          Alias -> Map Local Tables:
+            b 
+              Fetch Operator
+                limit: -1
+          Alias -> Map Local Operator Tree:
+            b 
+              TableScan
+                alias: b
       Reduce Operator Tree:
         Group By Operator
           aggregations:
@@ -229,9 +216,8 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_TABREF (TOK_TABNAME tbl1) a) (TOK_TABREF (TOK_TABNAME tbl2) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTIONSTAR count)))))
 
 STAGE DEPENDENCIES:
-  Stage-4 is a root stage , consists of Stage-5, Stage-1
-  Stage-5 has a backup stage: Stage-1
-  Stage-3 depends on stages: Stage-5
+  Stage-4 is a root stage , consists of Stage-3, Stage-1
+  Stage-3 has a backup stage: Stage-1
   Stage-1
   Stage-0 is a root stage
 
@@ -239,26 +225,6 @@ STAGE PLANS:
   Stage: Stage-4
     Conditional Operator
 
-  Stage: Stage-5
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        a 
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-        a 
-          TableScan
-            alias: a
-            HashTable Sink Operator
-              condition expressions:
-                0 
-                1 
-              handleSkewJoin: false
-              keys:
-                0 [Column[key]]
-                1 [Column[key]]
-              Position of Big Table: 1
-
   Stage: Stage-3
     Map Reduce
       Alias -> Map Operator Tree:
@@ -291,6 +257,14 @@ STAGE PLANS:
                           type: bigint
       Local Work:
         Map Reduce Local Work
+          Alias -> Map Local Tables:
+            a 
+              Fetch Operator
+                limit: -1
+          Alias -> Map Local Operator Tree:
+            a 
+              TableScan
+                alias: a
       Reduce Operator Tree:
         Group By Operator
           aggregations:

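In auto_sortmerge_join_14.q.out the same simplification shows up for outer joins: Stage-5, the standalone Map Reduce Local Work with its HashTable Sink Operator, disappears; Stage-3 now backs up directly to Stage-1; and the small-table Fetch Operator and TableScan are listed under the map-reduce stage's own Local Work. Which alias gets staged still follows the join direction, as the removed "Position of Big Table" lines indicate. A minimal sketch of the two queries, reconstructed from the syntax trees above (the EXPLAIN form and any session settings are assumed):

    -- left outer join: b is the hashed (local) side, the big table sits at position 0
    EXPLAIN
    SELECT count(*) FROM tbl1 a LEFT OUTER JOIN tbl2 b ON a.key = b.key;

    -- right outer join: a is the hashed (local) side, the big table sits at position 1
    EXPLAIN
    SELECT count(*) FROM tbl1 a RIGHT OUTER JOIN tbl2 b ON a.key = b.key;
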
Modified: hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_15.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_15.q.out?rev=1562955&r1=1562954&r2=1562955&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_15.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_15.q.out Thu Jan 30 20:46:16 2014
@@ -44,9 +44,8 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_TABREF (TOK_TABNAME tbl1) a) (TOK_TABREF (TOK_TABNAME tbl2) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTIONSTAR count)))))
 
 STAGE DEPENDENCIES:
-  Stage-4 is a root stage , consists of Stage-5, Stage-1
-  Stage-5 has a backup stage: Stage-1
-  Stage-3 depends on stages: Stage-5
+  Stage-4 is a root stage , consists of Stage-3, Stage-1
+  Stage-3 has a backup stage: Stage-1
   Stage-1
   Stage-0 is a root stage
 
@@ -54,26 +53,6 @@ STAGE PLANS:
   Stage: Stage-4
     Conditional Operator
 
-  Stage: Stage-5
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        b 
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-        b 
-          TableScan
-            alias: b
-            HashTable Sink Operator
-              condition expressions:
-                0 
-                1 
-              handleSkewJoin: false
-              keys:
-                0 [Column[key]]
-                1 [Column[key]]
-              Position of Big Table: 0
-
   Stage: Stage-3
     Map Reduce
       Alias -> Map Operator Tree:
@@ -106,6 +85,14 @@ STAGE PLANS:
                           type: bigint
       Local Work:
         Map Reduce Local Work
+          Alias -> Map Local Tables:
+            b 
+              Fetch Operator
+                limit: -1
+          Alias -> Map Local Operator Tree:
+            b 
+              TableScan
+                alias: b
       Reduce Operator Tree:
         Group By Operator
           aggregations:
@@ -194,9 +181,8 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_TABREF (TOK_TABNAME tbl1) a) (TOK_TABREF (TOK_TABNAME tbl2) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTIONSTAR count)))))
 
 STAGE DEPENDENCIES:
-  Stage-4 is a root stage , consists of Stage-5, Stage-1
-  Stage-5 has a backup stage: Stage-1
-  Stage-3 depends on stages: Stage-5
+  Stage-4 is a root stage , consists of Stage-3, Stage-1
+  Stage-3 has a backup stage: Stage-1
   Stage-1
   Stage-0 is a root stage
 
@@ -204,26 +190,6 @@ STAGE PLANS:
   Stage: Stage-4
     Conditional Operator
 
-  Stage: Stage-5
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        a 
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-        a 
-          TableScan
-            alias: a
-            HashTable Sink Operator
-              condition expressions:
-                0 
-                1 
-              handleSkewJoin: false
-              keys:
-                0 [Column[key]]
-                1 [Column[key]]
-              Position of Big Table: 1
-
   Stage: Stage-3
     Map Reduce
       Alias -> Map Operator Tree:
@@ -256,6 +222,14 @@ STAGE PLANS:
                           type: bigint
       Local Work:
         Map Reduce Local Work
+          Alias -> Map Local Tables:
+            a 
+              Fetch Operator
+                limit: -1
+          Alias -> Map Local Operator Tree:
+            a 
+              TableScan
+                alias: a
       Reduce Operator Tree:
         Group By Operator
           aggregations:

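auto_sortmerge_join_15.q.out changes in exactly the same way as auto_sortmerge_join_14.q.out: no separate hash-table-building stage, with the local work folded into the map-reduce stage. This looks like the non-staged map-join path, where a small alias is read directly in the map task rather than pre-staged into the distributed cache by a local task; if so, it would be toggled by a setting along the lines of the sketch below (the property name is an assumption, not confirmed by this diff):

    -- old plan shape:  Stage-4 consists of Stage-5, Stage-1; Stage-3 depends on Stage-5
    -- new plan shape:  Stage-4 consists of Stage-3, Stage-1; Stage-3 backs up to Stage-1
    -- assumed knob for skipping the separate local hash-table stage;
    -- verify the exact name against HiveConf at this revision
    SET hive.auto.convert.join.use.nonstaged=true;
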
Modified: hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out?rev=1562955&r1=1562954&r2=1562955&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out Thu Jan 30 20:46:16 2014
@@ -267,11 +267,9 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME bucket_big) a) (TOK_TABREF (TOK_TABNAME bucket_small) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTIONSTAR count)))))
 
 STAGE DEPENDENCIES:
-  Stage-5 is a root stage , consists of Stage-6, Stage-7, Stage-1
-  Stage-6 has a backup stage: Stage-1
-  Stage-3 depends on stages: Stage-6
-  Stage-7 has a backup stage: Stage-1
-  Stage-4 depends on stages: Stage-7
+  Stage-5 is a root stage , consists of Stage-3, Stage-4, Stage-1
+  Stage-3 has a backup stage: Stage-1
+  Stage-4 has a backup stage: Stage-1
   Stage-1
   Stage-0 is a root stage
 
@@ -279,70 +277,6 @@ STAGE PLANS:
   Stage: Stage-5
     Conditional Operator
 
-  Stage: Stage-6
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        b 
-          Fetch Operator
-            limit: -1
-            Partition Description:
-                Partition
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count 4
-                    bucket_field_name key
-                    columns key,value
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.bucket_small
-                    numFiles 4
-                    numRows 0
-                    partition_columns ds
-                    rawDataSize 0
-                    serialization.ddl struct bucket_small { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 226
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      SORTBUCKETCOLSPREFIX TRUE
-                      bucket_count 4
-                      bucket_field_name key
-                      columns key,value
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.bucket_small
-                      partition_columns ds
-                      serialization.ddl struct bucket_small { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.bucket_small
-                  name: default.bucket_small
-      Alias -> Map Local Operator Tree:
-        b 
-          TableScan
-            alias: b
-            GatherStats: false
-            HashTable Sink Operator
-              condition expressions:
-                0 
-                1 
-              handleSkewJoin: false
-              keys:
-                0 [Column[key]]
-                1 [Column[key]]
-              Position of Big Table: 0
-
   Stage: Stage-3
     Map Reduce
       Alias -> Map Operator Tree:
@@ -376,6 +310,58 @@ STAGE PLANS:
                           type: bigint
       Local Work:
         Map Reduce Local Work
+          Alias -> Map Local Tables:
+            b 
+              Fetch Operator
+                limit: -1
+                Partition Description:
+                    Partition
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      partition values:
+                        ds 2008-04-08
+                      properties:
+                        COLUMN_STATS_ACCURATE true
+                        bucket_count 4
+                        bucket_field_name key
+                        columns key,value
+                        columns.types string:string
+#### A masked pattern was here ####
+                        name default.bucket_small
+                        numFiles 4
+                        numRows 0
+                        partition_columns ds
+                        rawDataSize 0
+                        serialization.ddl struct bucket_small { string key, string value}
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 226
+#### A masked pattern was here ####
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        properties:
+                          SORTBUCKETCOLSPREFIX TRUE
+                          bucket_count 4
+                          bucket_field_name key
+                          columns key,value
+                          columns.types string:string
+#### A masked pattern was here ####
+                          name default.bucket_small
+                          partition_columns ds
+                          serialization.ddl struct bucket_small { string key, string value}
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.bucket_small
+                      name: default.bucket_small
+          Alias -> Map Local Operator Tree:
+            b 
+              TableScan
+                alias: b
+                GatherStats: false
       Path -> Alias:
 #### A masked pattern was here ####
       Path -> Partition:
@@ -547,114 +533,6 @@ STAGE PLANS:
               GatherStats: false
               MultiFileSpray: false
 
-  Stage: Stage-7
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        a 
-          Fetch Operator
-            limit: -1
-            Partition Description:
-                Partition
-                  base file name: ds=2008-04-08
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count 2
-                    bucket_field_name key
-                    columns key,value
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.bucket_big
-                    numFiles 2
-                    numRows 0
-                    partition_columns ds
-                    rawDataSize 0
-                    serialization.ddl struct bucket_big { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 2750
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      SORTBUCKETCOLSPREFIX TRUE
-                      bucket_count 2
-                      bucket_field_name key
-                      columns key,value
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.bucket_big
-                      partition_columns ds
-                      serialization.ddl struct bucket_big { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.bucket_big
-                  name: default.bucket_big
-                Partition
-                  base file name: ds=2008-04-09
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-09
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count 2
-                    bucket_field_name key
-                    columns key,value
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.bucket_big
-                    numFiles 2
-                    numRows 0
-                    partition_columns ds
-                    rawDataSize 0
-                    serialization.ddl struct bucket_big { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 2750
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      SORTBUCKETCOLSPREFIX TRUE
-                      bucket_count 2
-                      bucket_field_name key
-                      columns key,value
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.bucket_big
-                      partition_columns ds
-                      serialization.ddl struct bucket_big { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.bucket_big
-                  name: default.bucket_big
-      Alias -> Map Local Operator Tree:
-        a 
-          TableScan
-            alias: a
-            GatherStats: false
-            HashTable Sink Operator
-              condition expressions:
-                0 
-                1 
-              handleSkewJoin: false
-              keys:
-                0 [Column[key]]
-                1 [Column[key]]
-              Position of Big Table: 1
-
   Stage: Stage-4
     Map Reduce
       Alias -> Map Operator Tree:
@@ -688,6 +566,102 @@ STAGE PLANS:
                           type: bigint
       Local Work:
         Map Reduce Local Work
+          Alias -> Map Local Tables:
+            a 
+              Fetch Operator
+                limit: -1
+                Partition Description:
+                    Partition
+                      base file name: ds=2008-04-08
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      partition values:
+                        ds 2008-04-08
+                      properties:
+                        COLUMN_STATS_ACCURATE true
+                        bucket_count 2
+                        bucket_field_name key
+                        columns key,value
+                        columns.types string:string
+#### A masked pattern was here ####
+                        name default.bucket_big
+                        numFiles 2
+                        numRows 0
+                        partition_columns ds
+                        rawDataSize 0
+                        serialization.ddl struct bucket_big { string key, string value}
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 2750
+#### A masked pattern was here ####
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        properties:
+                          SORTBUCKETCOLSPREFIX TRUE
+                          bucket_count 2
+                          bucket_field_name key
+                          columns key,value
+                          columns.types string:string
+#### A masked pattern was here ####
+                          name default.bucket_big
+                          partition_columns ds
+                          serialization.ddl struct bucket_big { string key, string value}
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.bucket_big
+                      name: default.bucket_big
+                    Partition
+                      base file name: ds=2008-04-09
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      partition values:
+                        ds 2008-04-09
+                      properties:
+                        COLUMN_STATS_ACCURATE true
+                        bucket_count 2
+                        bucket_field_name key
+                        columns key,value
+                        columns.types string:string
+#### A masked pattern was here ####
+                        name default.bucket_big
+                        numFiles 2
+                        numRows 0
+                        partition_columns ds
+                        rawDataSize 0
+                        serialization.ddl struct bucket_big { string key, string value}
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 2750
+#### A masked pattern was here ####
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        properties:
+                          SORTBUCKETCOLSPREFIX TRUE
+                          bucket_count 2
+                          bucket_field_name key
+                          columns key,value
+                          columns.types string:string
+#### A masked pattern was here ####
+                          name default.bucket_big
+                          partition_columns ds
+                          serialization.ddl struct bucket_big { string key, string value}
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.bucket_big
+                      name: default.bucket_big
+          Alias -> Map Local Operator Tree:
+            a 
+              TableScan
+                alias: a
+                GatherStats: false
       Path -> Alias:
 #### A masked pattern was here ####
       Path -> Partition:
@@ -1031,6 +1005,18 @@ PREHOOK: Input: default@bucket_big@ds=20
 PREHOOK: Input: default@bucket_small
 PREHOOK: Input: default@bucket_small@ds=2008-04-08
 #### A masked pattern was here ####
+Execution failed with exit status: 2
+Obtaining error information
+
+Task failed!
+Task ID:
+  Stage-3
+
+Logs:
+
+#### A masked pattern was here ####
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
+ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask
 POSTHOOK: query: select count(*) FROM bucket_big a JOIN bucket_small b ON a.key = b.key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@bucket_big