You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by am...@apache.org on 2013/04/01 09:01:00 UTC

svn commit: r1463091 [15/16] - in /hive/branches/HIVE-4115: ./ bin/ bin/ext/ common/src/gen/ common/src/java/org/apache/hadoop/hive/conf/ conf/ contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ contrib/src/java/org/apache/hadoop/hive/contrib/serd...

Modified: hive/branches/HIVE-4115/ql/src/test/results/clientpositive/reduce_deduplicate_exclude_join.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientpositive/reduce_deduplicate_exclude_join.q.out?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientpositive/reduce_deduplicate_exclude_join.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientpositive/reduce_deduplicate_exclude_join.q.out Mon Apr  1 07:00:00 2013
@@ -7,12 +7,8 @@ ABSTRACT SYNTAX TREE:
 
 STAGE DEPENDENCIES:
   Stage-2 is a root stage
-  Stage-5 depends on stages: Stage-2 , consists of Stage-6, Stage-7, Stage-1
-  Stage-6 has a backup stage: Stage-1
-  Stage-3 depends on stages: Stage-6
-  Stage-7 has a backup stage: Stage-1
-  Stage-4 depends on stages: Stage-7
-  Stage-1
+  Stage-4 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-4
   Stage-0 is a root stage
 
 STAGE PLANS:
@@ -52,10 +48,7 @@ STAGE PLANS:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
 
-  Stage: Stage-5
-    Conditional Operator
-
-  Stage: Stage-6
+  Stage: Stage-4
     Map Reduce Local Work
       Alias -> Map Local Tables:
         b 
@@ -112,126 +105,6 @@ STAGE PLANS:
       Local Work:
         Map Reduce Local Work
 
-  Stage: Stage-7
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        $INTNAME 
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-        $INTNAME 
-            HashTable Sink Operator
-              condition expressions:
-                0 {_col0} {_col1}
-                1 {key} {value}
-              handleSkewJoin: false
-              keys:
-                0 [Column[_col0]]
-                1 [Column[key]]
-              Position of Big Table: 1
-
-  Stage: Stage-4
-    Map Reduce
-      Alias -> Map Operator Tree:
-        b 
-          TableScan
-            alias: b
-            Map Join Operator
-              condition map:
-                   Inner Join 0 to 1
-              condition expressions:
-                0 {_col0} {_col1}
-                1 {key} {value}
-              handleSkewJoin: false
-              keys:
-                0 [Column[_col0]]
-                1 [Column[key]]
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Position of Big Table: 1
-              Select Operator
-                expressions:
-                      expr: _col0
-                      type: string
-                      expr: _col1
-                      type: string
-                      expr: _col2
-                      type: string
-                      expr: _col3
-                      type: string
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Limit
-                  File Output Operator
-                    compressed: false
-                    GlobalTableId: 0
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-      Local Work:
-        Map Reduce Local Work
-
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        $INTNAME 
-            Reduce Output Operator
-              key expressions:
-                    expr: _col0
-                    type: string
-              sort order: +
-              Map-reduce partition columns:
-                    expr: _col0
-                    type: string
-              tag: 0
-              value expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: string
-        b 
-          TableScan
-            alias: b
-            Reduce Output Operator
-              key expressions:
-                    expr: key
-                    type: string
-              sort order: +
-              Map-reduce partition columns:
-                    expr: key
-                    type: string
-              tag: 1
-              value expressions:
-                    expr: key
-                    type: string
-                    expr: value
-                    type: string
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Inner Join 0 to 1
-          condition expressions:
-            0 {VALUE._col0} {VALUE._col1}
-            1 {VALUE._col0} {VALUE._col1}
-          handleSkewJoin: false
-          outputColumnNames: _col0, _col1, _col2, _col3
-          Select Operator
-            expressions:
-                  expr: _col0
-                  type: string
-                  expr: _col1
-                  type: string
-                  expr: _col2
-                  type: string
-                  expr: _col3
-                  type: string
-            outputColumnNames: _col0, _col1, _col2, _col3
-            Limit
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
   Stage: Stage-0
     Fetch Operator
       limit: 1

Modified: hive/branches/HIVE-4115/ql/src/test/results/clientpositive/stats0.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientpositive/stats0.q.out?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientpositive/stats0.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientpositive/stats0.q.out Mon Apr  1 07:00:00 2013
@@ -757,10 +757,12 @@ POSTHOOK: Lineage: stats_partitioned PAR
 ds=1
 PREHOOK: query: select * from stats_partitioned where ds is not null
 PREHOOK: type: QUERY
+PREHOOK: Input: default@stats_partitioned
 PREHOOK: Input: default@stats_partitioned@ds=1
 #### A masked pattern was here ####
 POSTHOOK: query: select * from stats_partitioned where ds is not null
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@stats_partitioned
 POSTHOOK: Input: default@stats_partitioned@ds=1
 #### A masked pattern was here ####
 POSTHOOK: Lineage: stats_non_partitioned.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
@@ -2324,10 +2326,12 @@ POSTHOOK: Lineage: stats_partitioned PAR
 ds=1
 PREHOOK: query: select * from stats_partitioned where ds is not null
 PREHOOK: type: QUERY
+PREHOOK: Input: default@stats_partitioned
 PREHOOK: Input: default@stats_partitioned@ds=1
 #### A masked pattern was here ####
 POSTHOOK: query: select * from stats_partitioned where ds is not null
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@stats_partitioned
 POSTHOOK: Input: default@stats_partitioned@ds=1
 #### A masked pattern was here ####
 POSTHOOK: Lineage: stats_non_partitioned.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/branches/HIVE-4115/ql/src/test/results/clientpositive/stats3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientpositive/stats3.q.out?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientpositive/stats3.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientpositive/stats3.q.out Mon Apr  1 07:00:00 2013
@@ -111,10 +111,12 @@ POSTHOOK: Output: default@hive_test_dst@
 POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
 PREHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_Part'
 PREHOOK: type: QUERY
+PREHOOK: Input: default@hive_test_dst
 PREHOOK: Input: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
 #### A masked pattern was here ####
 POSTHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_Part'
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hive_test_dst
 POSTHOOK: Input: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
 #### A masked pattern was here ####
 POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
@@ -148,9 +150,11 @@ POSTHOOK: Lineage: hive_test_dst PARTITI
 POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
 PREHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
 PREHOOK: type: QUERY
+PREHOOK: Input: default@hive_test_dst
 #### A masked pattern was here ####
 POSTHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hive_test_dst
 #### A masked pattern was here ####
 POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
 POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
@@ -169,10 +173,12 @@ POSTHOOK: Lineage: hive_test_dst PARTITI
 6
 PREHOOK: query: select * from hive_test_dst where pcol1='test_part'
 PREHOOK: type: QUERY
+PREHOOK: Input: default@hive_test_dst
 PREHOOK: Input: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
 #### A masked pattern was here ####
 POSTHOOK: query: select * from hive_test_dst where pcol1='test_part'
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hive_test_dst
 POSTHOOK: Input: default@hive_test_dst@pcol1=test_part/pcol2=test_Part
 #### A masked pattern was here ####
 POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
@@ -185,17 +191,21 @@ POSTHOOK: Lineage: hive_test_dst PARTITI
 6	test_part	test_Part
 PREHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
 PREHOOK: type: QUERY
+PREHOOK: Input: default@hive_test_dst
 #### A masked pattern was here ####
 POSTHOOK: query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hive_test_dst
 #### A masked pattern was here ####
 POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
 POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
 PREHOOK: query: select * from hive_test_dst where pcol1='test_Part'
 PREHOOK: type: QUERY
+PREHOOK: Input: default@hive_test_dst
 #### A masked pattern was here ####
 POSTHOOK: query: select * from hive_test_dst where pcol1='test_Part'
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hive_test_dst
 #### A masked pattern was here ####
 POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]
 POSTHOOK: Lineage: hive_test_dst PARTITION(pcol1=test_part,pcol2=test_Part).col1 SIMPLE [(hive_test_src)hive_test_src.FieldSchema(name:col1, type:string, comment:null), ]

Modified: hive/branches/HIVE-4115/ql/src/test/results/clientpositive/stats4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientpositive/stats4.q.out?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientpositive/stats4.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientpositive/stats4.q.out Mon Apr  1 07:00:00 2013
@@ -283,11 +283,13 @@ ds=2008-12-31/hr=11
 ds=2008-12-31/hr=12
 PREHOOK: query: select * from nzhang_part1 where ds is not null and hr is not null
 PREHOOK: type: QUERY
+PREHOOK: Input: default@nzhang_part1
 PREHOOK: Input: default@nzhang_part1@ds=2008-04-08/hr=11
 PREHOOK: Input: default@nzhang_part1@ds=2008-04-08/hr=12
 #### A masked pattern was here ####
 POSTHOOK: query: select * from nzhang_part1 where ds is not null and hr is not null
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@nzhang_part1
 POSTHOOK: Input: default@nzhang_part1@ds=2008-04-08/hr=11
 POSTHOOK: Input: default@nzhang_part1@ds=2008-04-08/hr=12
 #### A masked pattern was here ####
@@ -1301,11 +1303,13 @@ POSTHOOK: Lineage: nzhang_part2 PARTITIO
 97	val_97	2008-04-08	12
 PREHOOK: query: select * from nzhang_part2 where ds is not null and hr is not null
 PREHOOK: type: QUERY
+PREHOOK: Input: default@nzhang_part2
 PREHOOK: Input: default@nzhang_part2@ds=2008-12-31/hr=11
 PREHOOK: Input: default@nzhang_part2@ds=2008-12-31/hr=12
 #### A masked pattern was here ####
 POSTHOOK: query: select * from nzhang_part2 where ds is not null and hr is not null
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@nzhang_part2
 POSTHOOK: Input: default@nzhang_part2@ds=2008-12-31/hr=11
 POSTHOOK: Input: default@nzhang_part2@ds=2008-12-31/hr=12
 #### A masked pattern was here ####

Modified: hive/branches/HIVE-4115/ql/src/test/results/clientpositive/truncate_table.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientpositive/truncate_table.q.out?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientpositive/truncate_table.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientpositive/truncate_table.q.out Mon Apr  1 07:00:00 2013
@@ -129,10 +129,12 @@ POSTHOOK: type: TRUNCATETABLE
 POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=11
 PREHOOK: query: select * from srcpart_truncate where ds='2008-04-08' and hr='11'
 PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart_truncate
 PREHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=11
 #### A masked pattern was here ####
 POSTHOOK: query: select * from srcpart_truncate where ds='2008-04-08' and hr='11'
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart_truncate
 POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=11
 #### A masked pattern was here ####
 PREHOOK: query: -- truncate partitions with partial spec
@@ -167,11 +169,13 @@ POSTHOOK: Output: default@srcpart_trunca
 POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=12
 PREHOOK: query: select * from srcpart_truncate where hr='12'
 PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart_truncate
 PREHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=12
 PREHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=12
 #### A masked pattern was here ####
 POSTHOOK: query: select * from srcpart_truncate where hr='12'
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart_truncate
 POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=12
 POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=12
 #### A masked pattern was here ####
@@ -208,6 +212,7 @@ POSTHOOK: Output: default@srcpart_trunca
 POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=12
 PREHOOK: query: select * from srcpart_truncate
 PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart_truncate
 PREHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=11
 PREHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=12
 PREHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=11
@@ -215,6 +220,7 @@ PREHOOK: Input: default@srcpart_truncate
 #### A masked pattern was here ####
 POSTHOOK: query: select * from srcpart_truncate
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart_truncate
 POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=11
 POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=12
 POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=11

Modified: hive/branches/HIVE-4115/ql/src/test/results/clientpositive/udf_hour.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientpositive/udf_hour.q.out?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientpositive/udf_hour.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientpositive/udf_hour.q.out Mon Apr  1 07:00:00 2013
@@ -72,3 +72,14 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 13	13	NULL
+PREHOOK: query: SELECT hour(cast('2009-08-07 13:14:15'  as timestamp))
+FROM src WHERE key=86
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT hour(cast('2009-08-07 13:14:15'  as timestamp))
+FROM src WHERE key=86
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+13

Modified: hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_1.q.out?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_1.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_1.q.out Mon Apr  1 07:00:00 2013
@@ -110,21 +110,14 @@ STAGE PLANS:
                   expr: _col1
                   type: bigint
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-              outputColumnNames: _col0, _col1
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
   Stage: Stage-0
     Move Operator
@@ -185,21 +178,14 @@ STAGE PLANS:
                   expr: _col1
                   type: bigint
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-              outputColumnNames: _col0, _col1
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
 
 PREHOOK: query: insert overwrite table outputTbl1

Modified: hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_11.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_11.q.out?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_11.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_11.q.out Mon Apr  1 07:00:00 2013
@@ -99,24 +99,17 @@ STAGE PLANS:
                   expressions:
                         expr: _col0
                         type: string
-                        expr: _col1
-                        type: int
+                        expr: UDFToLong(_col1)
+                        type: bigint
                   outputColumnNames: _col0, _col1
-                  Select Operator
-                    expressions:
-                          expr: _col0
-                          type: string
-                          expr: UDFToLong(_col1)
-                          type: bigint
-                    outputColumnNames: _col0, _col1
-                    File Output Operator
-                      compressed: false
-                      GlobalTableId: 1
-                      table:
-                          input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                          serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                          name: default.outputtbl1
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 1
+                    table:
+                        input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                        name: default.outputtbl1
         null-subquery2:b-subquery2-subquery1:a-subquery1:inputtbl1 
           TableScan
             alias: inputtbl1
@@ -140,24 +133,17 @@ STAGE PLANS:
                       expressions:
                             expr: _col0
                             type: string
-                            expr: _col1
-                            type: int
+                            expr: UDFToLong(_col1)
+                            type: bigint
                       outputColumnNames: _col0, _col1
-                      Select Operator
-                        expressions:
-                              expr: _col0
-                              type: string
-                              expr: UDFToLong(_col1)
-                              type: bigint
-                        outputColumnNames: _col0, _col1
-                        File Output Operator
-                          compressed: false
-                          GlobalTableId: 1
-                          table:
-                              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                              name: default.outputtbl1
+                      File Output Operator
+                        compressed: false
+                        GlobalTableId: 1
+                        table:
+                            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                            name: default.outputtbl1
         null-subquery2:b-subquery2-subquery2:a-subquery2:inputtbl1 
           TableScan
             alias: inputtbl1
@@ -181,24 +167,17 @@ STAGE PLANS:
                       expressions:
                             expr: _col0
                             type: string
-                            expr: _col1
-                            type: int
+                            expr: UDFToLong(_col1)
+                            type: bigint
                       outputColumnNames: _col0, _col1
-                      Select Operator
-                        expressions:
-                              expr: _col0
-                              type: string
-                              expr: UDFToLong(_col1)
-                              type: bigint
-                        outputColumnNames: _col0, _col1
-                        File Output Operator
-                          compressed: false
-                          GlobalTableId: 1
-                          table:
-                              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                              name: default.outputtbl1
+                      File Output Operator
+                        compressed: false
+                        GlobalTableId: 1
+                        table:
+                            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                            name: default.outputtbl1
 
   Stage: Stage-6
     Conditional Operator

Modified: hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_14.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_14.q.out?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_14.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_14.q.out Mon Apr  1 07:00:00 2013
@@ -91,38 +91,17 @@ STAGE PLANS:
               expressions:
                     expr: key
                     type: string
-                    expr: 1
-                    type: int
+                    expr: UDFToLong(1)
+                    type: bigint
               outputColumnNames: _col0, _col1
-              Select Operator
-                expressions:
-                      expr: _col0
-                      type: string
-                      expr: UDFToString(_col1)
-                      type: string
-                outputColumnNames: _col0, _col1
-                Select Operator
-                  expressions:
-                        expr: _col0
-                        type: string
-                        expr: _col1
-                        type: string
-                  outputColumnNames: _col0, _col1
-                  Select Operator
-                    expressions:
-                          expr: _col0
-                          type: string
-                          expr: UDFToLong(_col1)
-                          type: bigint
-                    outputColumnNames: _col0, _col1
-                    File Output Operator
-                      compressed: false
-                      GlobalTableId: 1
-                      table:
-                          input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                          serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                          name: default.outputtbl1
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+                table:
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.outputtbl1
 
   Stage: Stage-6
     Conditional Operator
@@ -200,31 +179,17 @@ STAGE PLANS:
                 expressions:
                       expr: _col0
                       type: string
-                      expr: _col5
-                      type: string
+                      expr: UDFToLong(_col5)
+                      type: bigint
                 outputColumnNames: _col0, _col1
-                Select Operator
-                  expressions:
-                        expr: _col0
-                        type: string
-                        expr: _col1
-                        type: string
-                  outputColumnNames: _col0, _col1
-                  Select Operator
-                    expressions:
-                          expr: _col0
-                          type: string
-                          expr: UDFToLong(_col1)
-                          type: bigint
-                    outputColumnNames: _col0, _col1
-                    File Output Operator
-                      compressed: false
-                      GlobalTableId: 1
-                      table:
-                          input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                          serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                          name: default.outputtbl1
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 1
+                  table:
+                      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                      name: default.outputtbl1
       Local Work:
         Map Reduce Local Work
 
@@ -270,31 +235,17 @@ STAGE PLANS:
                 expressions:
                       expr: _col0
                       type: string
-                      expr: _col5
-                      type: string
+                      expr: UDFToLong(_col5)
+                      type: bigint
                 outputColumnNames: _col0, _col1
-                Select Operator
-                  expressions:
-                        expr: _col0
-                        type: string
-                        expr: _col1
-                        type: string
-                  outputColumnNames: _col0, _col1
-                  Select Operator
-                    expressions:
-                          expr: _col0
-                          type: string
-                          expr: UDFToLong(_col1)
-                          type: bigint
-                    outputColumnNames: _col0, _col1
-                    File Output Operator
-                      compressed: false
-                      GlobalTableId: 1
-                      table:
-                          input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                          serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                          name: default.outputtbl1
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 1
+                  table:
+                      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                      name: default.outputtbl1
       Local Work:
         Map Reduce Local Work
 
@@ -344,31 +295,17 @@ STAGE PLANS:
             expressions:
                   expr: _col0
                   type: string
-                  expr: _col5
-                  type: string
+                  expr: UDFToLong(_col5)
+                  type: bigint
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: string
-              outputColumnNames: _col0, _col1
-              Select Operator
-                expressions:
-                      expr: _col0
-                      type: string
-                      expr: UDFToLong(_col1)
-                      type: bigint
-                outputColumnNames: _col0, _col1
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 1
-                  table:
-                      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                      name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                  name: default.outputtbl1
 
 
 PREHOOK: query: insert overwrite table outputTbl1

Modified: hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_15.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_15.q.out?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_15.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_15.q.out Mon Apr  1 07:00:00 2013
@@ -118,23 +118,14 @@ STAGE PLANS:
                   expr: '2'
                   type: string
             outputColumnNames: _col0, _col1, _col2
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-                    expr: _col2
-                    type: string
-              outputColumnNames: _col0, _col1, _col2
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                  name: default.outputtbl1
 
   Stage: Stage-0
     Move Operator
@@ -199,23 +190,14 @@ STAGE PLANS:
                   expr: '1'
                   type: string
             outputColumnNames: _col0, _col1, _col2
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-                    expr: _col2
-                    type: string
-              outputColumnNames: _col0, _col1, _col2
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                  name: default.outputtbl1
 
 
 PREHOOK: query: insert overwrite table outputTbl1 partition (ds)
@@ -293,10 +275,12 @@ ds=1
 ds=2
 PREHOOK: query: select * from outputTbl1 where ds = '1' order by key, values
 PREHOOK: type: QUERY
+PREHOOK: Input: default@outputtbl1
 PREHOOK: Input: default@outputtbl1@ds=1
 #### A masked pattern was here ####
 POSTHOOK: query: select * from outputTbl1 where ds = '1' order by key, values
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@outputtbl1
 POSTHOOK: Input: default@outputtbl1@ds=1
 #### A masked pattern was here ####
 POSTHOOK: Lineage: outputtbl1 PARTITION(ds=1).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
@@ -310,10 +294,12 @@ POSTHOOK: Lineage: outputtbl1 PARTITION(
 8	2	1
 PREHOOK: query: select * from outputTbl1 where ds = '2' order by key, values
 PREHOOK: type: QUERY
+PREHOOK: Input: default@outputtbl1
 PREHOOK: Input: default@outputtbl1@ds=2
 #### A masked pattern was here ####
 POSTHOOK: query: select * from outputTbl1 where ds = '2' order by key, values
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@outputtbl1
 POSTHOOK: Input: default@outputtbl1@ds=2
 #### A masked pattern was here ####
 POSTHOOK: Lineage: outputtbl1 PARTITION(ds=1).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]

Modified: hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_16.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_16.q.out?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_16.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_16.q.out Mon Apr  1 07:00:00 2013
@@ -121,23 +121,14 @@ STAGE PLANS:
                   expr: '2'
                   type: string
             outputColumnNames: _col0, _col1, _col2
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-                    expr: _col2
-                    type: string
-              outputColumnNames: _col0, _col1, _col2
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                  name: default.outputtbl1
 
   Stage: Stage-6
     Conditional Operator
@@ -223,23 +214,14 @@ STAGE PLANS:
                   expr: '1'
                   type: string
             outputColumnNames: _col0, _col1, _col2
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-                    expr: _col2
-                    type: string
-              outputColumnNames: _col0, _col1, _col2
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                  name: default.outputtbl1
 
 
 PREHOOK: query: insert overwrite table outputTbl1 partition (ds)
@@ -317,10 +299,12 @@ ds=1
 ds=2
 PREHOOK: query: select * from outputTbl1 where ds = '1' order by key, values
 PREHOOK: type: QUERY
+PREHOOK: Input: default@outputtbl1
 PREHOOK: Input: default@outputtbl1@ds=1
 #### A masked pattern was here ####
 POSTHOOK: query: select * from outputTbl1 where ds = '1' order by key, values
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@outputtbl1
 POSTHOOK: Input: default@outputtbl1@ds=1
 #### A masked pattern was here ####
 POSTHOOK: Lineage: outputtbl1 PARTITION(ds=1).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
@@ -334,10 +318,12 @@ POSTHOOK: Lineage: outputtbl1 PARTITION(
 8	2	1
 PREHOOK: query: select * from outputTbl1 where ds = '2' order by key, values
 PREHOOK: type: QUERY
+PREHOOK: Input: default@outputtbl1
 PREHOOK: Input: default@outputtbl1@ds=2
 #### A masked pattern was here ####
 POSTHOOK: query: select * from outputTbl1 where ds = '2' order by key, values
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@outputtbl1
 POSTHOOK: Input: default@outputtbl1@ds=2
 #### A masked pattern was here ####
 POSTHOOK: Lineage: outputtbl1 PARTITION(ds=1).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]

Modified: hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_17.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_17.q.out?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_17.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_17.q.out Mon Apr  1 07:00:00 2013
@@ -80,28 +80,19 @@ STAGE PLANS:
                   expressions:
                         expr: _col0
                         type: string
-                        expr: _col1
-                        type: int
+                        expr: UDFToLong(_col1)
+                        type: bigint
                         expr: _col2
                         type: string
                   outputColumnNames: _col0, _col1, _col2
-                  Select Operator
-                    expressions:
-                          expr: _col0
-                          type: string
-                          expr: UDFToLong(_col1)
-                          type: bigint
-                          expr: _col2
-                          type: string
-                    outputColumnNames: _col0, _col1, _col2
-                    File Output Operator
-                      compressed: false
-                      GlobalTableId: 1
-                      table:
-                          input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                          serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                          name: default.outputtbl1
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 1
+                    table:
+                        input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                        name: default.outputtbl1
         null-subquery2:a-subquery2:inputtbl1 
           TableScan
             alias: inputtbl1
@@ -119,28 +110,19 @@ STAGE PLANS:
                   expressions:
                         expr: _col0
                         type: string
-                        expr: _col1
-                        type: int
+                        expr: UDFToLong(_col1)
+                        type: bigint
                         expr: _col2
                         type: string
                   outputColumnNames: _col0, _col1, _col2
-                  Select Operator
-                    expressions:
-                          expr: _col0
-                          type: string
-                          expr: UDFToLong(_col1)
-                          type: bigint
-                          expr: _col2
-                          type: string
-                    outputColumnNames: _col0, _col1, _col2
-                    File Output Operator
-                      compressed: false
-                      GlobalTableId: 1
-                      table:
-                          input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                          serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                          name: default.outputtbl1
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 1
+                    table:
+                        input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                        name: default.outputtbl1
 
   Stage: Stage-0
     Move Operator
@@ -230,10 +212,12 @@ ds=1
 ds=2
 PREHOOK: query: select * from outputTbl1 where ds = '1' order by key, values
 PREHOOK: type: QUERY
+PREHOOK: Input: default@outputtbl1
 PREHOOK: Input: default@outputtbl1@ds=1
 #### A masked pattern was here ####
 POSTHOOK: query: select * from outputTbl1 where ds = '1' order by key, values
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@outputtbl1
 POSTHOOK: Input: default@outputtbl1@ds=1
 #### A masked pattern was here ####
 POSTHOOK: Lineage: outputtbl1 PARTITION(ds=1).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
@@ -248,10 +232,12 @@ POSTHOOK: Lineage: outputtbl1 PARTITION(
 8	1	1
 PREHOOK: query: select * from outputTbl1 where ds = '2' order by key, values
 PREHOOK: type: QUERY
+PREHOOK: Input: default@outputtbl1
 PREHOOK: Input: default@outputtbl1@ds=2
 #### A masked pattern was here ####
 POSTHOOK: query: select * from outputTbl1 where ds = '2' order by key, values
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@outputtbl1
 POSTHOOK: Input: default@outputtbl1@ds=2
 #### A masked pattern was here ####
 POSTHOOK: Lineage: outputtbl1 PARTITION(ds=1).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]

Modified: hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_18.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_18.q.out?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_18.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_18.q.out Mon Apr  1 07:00:00 2013
@@ -126,23 +126,14 @@ STAGE PLANS:
                   expr: _col1
                   type: string
             outputColumnNames: _col0, _col1, _col2
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-                    expr: _col2
-                    type: string
-              outputColumnNames: _col0, _col1, _col2
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
   Stage: Stage-0
     Move Operator
@@ -217,23 +208,14 @@ STAGE PLANS:
                   expr: _col1
                   type: string
             outputColumnNames: _col0, _col1, _col2
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-                    expr: _col2
-                    type: string
-              outputColumnNames: _col0, _col1, _col2
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
 
 PREHOOK: query: insert overwrite table outputTbl1 partition (ds)
@@ -343,10 +325,12 @@ ds=18
 ds=28
 PREHOOK: query: select * from outputTbl1 where ds = '11' order by key, values
 PREHOOK: type: QUERY
+PREHOOK: Input: default@outputtbl1
 PREHOOK: Input: default@outputtbl1@ds=11
 #### A masked pattern was here ####
 POSTHOOK: query: select * from outputTbl1 where ds = '11' order by key, values
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@outputtbl1
 POSTHOOK: Input: default@outputtbl1@ds=11
 #### A masked pattern was here ####
 POSTHOOK: Lineage: outputtbl1 PARTITION(ds=11).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
@@ -365,10 +349,12 @@ POSTHOOK: Lineage: outputtbl1 PARTITION(
 1	1	11
 PREHOOK: query: select * from outputTbl1 where ds = '18' order by key, values
 PREHOOK: type: QUERY
+PREHOOK: Input: default@outputtbl1
 PREHOOK: Input: default@outputtbl1@ds=18
 #### A masked pattern was here ####
 POSTHOOK: query: select * from outputTbl1 where ds = '18' order by key, values
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@outputtbl1
 POSTHOOK: Input: default@outputtbl1@ds=18
 #### A masked pattern was here ####
 POSTHOOK: Lineage: outputtbl1 PARTITION(ds=11).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
@@ -387,6 +373,7 @@ POSTHOOK: Lineage: outputtbl1 PARTITION(
 8	1	18
 PREHOOK: query: select * from outputTbl1 where ds is not null order by key, values
 PREHOOK: type: QUERY
+PREHOOK: Input: default@outputtbl1
 PREHOOK: Input: default@outputtbl1@ds=11
 PREHOOK: Input: default@outputtbl1@ds=12
 PREHOOK: Input: default@outputtbl1@ds=13
@@ -396,6 +383,7 @@ PREHOOK: Input: default@outputtbl1@ds=28
 #### A masked pattern was here ####
 POSTHOOK: query: select * from outputTbl1 where ds is not null order by key, values
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@outputtbl1
 POSTHOOK: Input: default@outputtbl1@ds=11
 POSTHOOK: Input: default@outputtbl1@ds=12
 POSTHOOK: Input: default@outputtbl1@ds=13

Modified: hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_19.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_19.q.out?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_19.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_19.q.out Mon Apr  1 07:00:00 2013
@@ -110,21 +110,14 @@ STAGE PLANS:
                   expr: _col1
                   type: bigint
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-              outputColumnNames: _col0, _col1
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
   Stage: Stage-0
     Move Operator
@@ -185,21 +178,14 @@ STAGE PLANS:
                   expr: _col1
                   type: bigint
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-              outputColumnNames: _col0, _col1
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
 
 PREHOOK: query: insert overwrite table outputTbl1
@@ -359,21 +345,14 @@ STAGE PLANS:
                   expr: _col1
                   type: bigint
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-              outputColumnNames: _col0, _col1
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
   Stage: Stage-0
     Move Operator
@@ -438,21 +417,14 @@ STAGE PLANS:
                   expr: _col1
                   type: bigint
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-              outputColumnNames: _col0, _col1
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
 
 PREHOOK: query: insert overwrite table outputTbl1
@@ -576,37 +548,30 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Select Operator
             expressions:
-                  expr: _col0
-                  type: string
+                  expr: (_col0 + _col0)
+                  type: double
                   expr: _col1
                   type: bigint
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: (_col0 + _col0)
-                    type: double
-                    expr: _col1
-                    type: bigint
-              outputColumnNames: _col0, _col1
-              Filter Operator
-                predicate:
-                    expr: (_col0 >= 7.0)
-                    type: boolean
-                Select Operator
-                  expressions:
-                        expr: _col0
-                        type: double
-                        expr: _col1
-                        type: bigint
-                  outputColumnNames: _col0, _col1
-                  File Output Operator
-                    compressed: false
-                    GlobalTableId: 1
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        name: default.outputtbl1
+            Filter Operator
+              predicate:
+                  expr: (_col0 >= 7.0)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: _col0
+                      type: double
+                      expr: _col1
+                      type: bigint
+                outputColumnNames: _col0, _col1
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 1
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.outputtbl1
 
   Stage: Stage-0
     Move Operator
@@ -662,37 +627,30 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Select Operator
             expressions:
-                  expr: _col0
-                  type: string
+                  expr: (_col0 + _col0)
+                  type: double
                   expr: _col1
                   type: bigint
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: (_col0 + _col0)
-                    type: double
-                    expr: _col1
-                    type: bigint
-              outputColumnNames: _col0, _col1
-              Filter Operator
-                predicate:
-                    expr: (_col0 >= 7.0)
-                    type: boolean
-                Select Operator
-                  expressions:
-                        expr: _col0
-                        type: double
-                        expr: _col1
-                        type: bigint
-                  outputColumnNames: _col0, _col1
-                  File Output Operator
-                    compressed: false
-                    GlobalTableId: 1
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        name: default.outputtbl1
+            Filter Operator
+              predicate:
+                  expr: (_col0 >= 7.0)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: _col0
+                      type: double
+                      expr: _col1
+                      type: bigint
+                outputColumnNames: _col0, _col1
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 1
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.outputtbl1
 
 
 PREHOOK: query: insert overwrite table outputTbl1

Modified: hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_2.q.out?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_2.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_2.q.out Mon Apr  1 07:00:00 2013
@@ -81,28 +81,14 @@ STAGE PLANS:
                     expr: 2
                     type: int
               outputColumnNames: _col0, _col1
-              Select Operator
-                expressions:
-                      expr: _col0
-                      type: string
-                      expr: UDFToLong(_col1)
-                      type: bigint
-                outputColumnNames: _col0, _col1
-                Select Operator
-                  expressions:
-                        expr: _col0
-                        type: string
-                        expr: _col1
-                        type: bigint
-                  outputColumnNames: _col0, _col1
-                  File Output Operator
-                    compressed: false
-                    GlobalTableId: 1
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        name: default.outputtbl1
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.outputtbl1
 
   Stage: Stage-0
     Move Operator
@@ -163,21 +149,14 @@ STAGE PLANS:
                   expr: _col1
                   type: bigint
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-              outputColumnNames: _col0, _col1
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
   Stage: Stage-3
     Map Reduce
@@ -192,28 +171,14 @@ STAGE PLANS:
                     expr: 1
                     type: int
               outputColumnNames: _col0, _col1
-              Select Operator
-                expressions:
-                      expr: _col0
-                      type: string
-                      expr: UDFToLong(_col1)
-                      type: bigint
-                outputColumnNames: _col0, _col1
-                Select Operator
-                  expressions:
-                        expr: _col0
-                        type: string
-                        expr: _col1
-                        type: bigint
-                  outputColumnNames: _col0, _col1
-                  File Output Operator
-                    compressed: false
-                    GlobalTableId: 1
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        name: default.outputtbl1
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.outputtbl1
 
 
 PREHOOK: query: insert overwrite table outputTbl1

Modified: hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_20.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_20.q.out?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_20.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_20.q.out Mon Apr  1 07:00:00 2013
@@ -107,26 +107,19 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Select Operator
             expressions:
-                  expr: _col0
-                  type: string
                   expr: _col1
                   type: bigint
+                  expr: _col0
+                  type: string
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col1
-                    type: bigint
-                    expr: _col0
-                    type: string
-              outputColumnNames: _col0, _col1
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
   Stage: Stage-0
     Move Operator
@@ -182,26 +175,19 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Select Operator
             expressions:
-                  expr: _col0
-                  type: string
                   expr: _col1
                   type: bigint
+                  expr: _col0
+                  type: string
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col1
-                    type: bigint
-                    expr: _col0
-                    type: string
-              outputColumnNames: _col0, _col1
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
 
 PREHOOK: query: insert overwrite table outputTbl1

Modified: hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_21.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_21.q.out?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_21.q.out (original)
+++ hive/branches/HIVE-4115/ql/src/test/results/clientpositive/union_remove_21.q.out Mon Apr  1 07:00:00 2013
@@ -109,22 +109,15 @@ STAGE PLANS:
             expressions:
                   expr: _col0
                   type: string
-                  expr: _col1
-                  type: bigint
-            outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            outputColumnNames: _col0
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
   Stage: Stage-0
     Move Operator
@@ -182,22 +175,15 @@ STAGE PLANS:
             expressions:
                   expr: _col0
                   type: string
-                  expr: _col1
-                  type: bigint
-            outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            outputColumnNames: _col0
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
 
 PREHOOK: query: insert overwrite table outputTbl1

Modified: hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java (original)
+++ hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java Mon Apr  1 07:00:00 2013
@@ -31,8 +31,10 @@ import org.apache.hadoop.io.Writable;
  * HiveDeserializer also provides the ObjectInspector which can be used to
  * inspect the internal structure of the object (that is returned by deserialize
  * function).
- *
+ * All deserializers should extend the abstract class AbstractDeserializer, and eventually
+ * the Deserializer interface should be removed
  */
+@Deprecated
 public interface Deserializer {
 
   /**
@@ -50,7 +52,7 @@ public interface Deserializer {
    * Deserialize an object out of a Writable blob. In most cases, the return
    * value of this function will be constant since the function will reuse the
    * returned object. If the client wants to keep a copy of the object, the
-   * client needs to clone the returned value by calling
+   * client needs to clone the returned value by calling
    * ObjectInspectorUtils.getStandardObject().
    *
    * @param blob

Modified: hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java (original)
+++ hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java Mon Apr  1 07:00:00 2013
@@ -30,10 +30,10 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.MetadataListStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -42,7 +42,7 @@ import org.apache.hadoop.io.Writable;
  * MetadataTypedColumnsetSerDe.
  *
  */
-public class MetadataTypedColumnsetSerDe implements SerDe {
+public class MetadataTypedColumnsetSerDe extends AbstractSerDe {
 
   public static final Log LOG = LogFactory
       .getLog(MetadataTypedColumnsetSerDe.class.getName());
@@ -95,6 +95,7 @@ public class MetadataTypedColumnsetSerDe
     return defaultVal;
   }
 
+  @Override
   public void initialize(Configuration job, Properties tbl) throws SerDeException {
     String altSep = tbl.getProperty(serdeConstants.SERIALIZATION_FORMAT);
     separator = getByteValue(altSep, DefaultSeparator);
@@ -167,6 +168,7 @@ public class MetadataTypedColumnsetSerDe
 
   ColumnSet deserializeCache = new ColumnSet();
 
+  @Override
   public Object deserialize(Writable field) throws SerDeException {
     String row = null;
     if (field instanceof BytesWritable) {
@@ -193,16 +195,19 @@ public class MetadataTypedColumnsetSerDe
     }
   }
 
+  @Override
   public ObjectInspector getObjectInspector() throws SerDeException {
     return cachedObjectInspector;
   }
 
+  @Override
   public Class<? extends Writable> getSerializedClass() {
     return Text.class;
   }
 
   Text serializeCache = new Text();
 
+  @Override
   public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
 
     if (objInspector.getCategory() != Category.STRUCT) {
@@ -232,6 +237,7 @@ public class MetadataTypedColumnsetSerDe
     return serializeCache;
   }
 
+  @Override
   public SerDeStats getSerDeStats() {
     // no support for statistics
     return null;

Modified: hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/NullStructSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/NullStructSerDe.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/NullStructSerDe.java (original)
+++ hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/NullStructSerDe.java Mon Apr  1 07:00:00 2013
@@ -32,7 +32,7 @@ import org.apache.hadoop.io.Writable;
  * Placeholder SerDe for cases where neither serialization nor deserialization is needed
  *
  */
-public class NullStructSerDe implements SerDe {
+public class NullStructSerDe extends AbstractSerDe {
 
   class NullStructField implements StructField {
     @Override

Modified: hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java (original)
+++ hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java Mon Apr  1 07:00:00 2013
@@ -57,7 +57,7 @@ import org.apache.hadoop.io.Writable;
  * writableStringObjectInspector. We should switch to that when we have a UTF-8
  * based Regex library.
  */
-public class RegexSerDe implements SerDe {
+public class RegexSerDe extends AbstractSerDe {
 
   public static final Log LOG = LogFactory.getLog(RegexSerDe.class.getName());
 
@@ -249,6 +249,7 @@ public class RegexSerDe implements SerDe
           "Regex SerDe doesn't support the serialize() method");
   }
 
+  @Override
   public SerDeStats getSerDeStats() {
     // no support for statistics
     return null;

Modified: hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/SerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/SerDe.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/SerDe.java (original)
+++ hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/SerDe.java Mon Apr  1 07:00:00 2013
@@ -20,14 +20,16 @@ package org.apache.hadoop.hive.serde2;
 
 /**
  * A union of HiveDeserializer and HiveSerializer interface.
- * 
+ *
  * If a developer wants his hive table to be read-only, then he just want to
  * return
- * 
+ *
  * both readable and writable, then
- * 
- * 
+ *
+ * All serdes should extend the abstract class AbstractSerDe, and eventually the SerDe
+ * interface should be removed
  */
+@Deprecated
 public interface SerDe extends Deserializer, Serializer {
 
 }

Modified: hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java (original)
+++ hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java Mon Apr  1 07:00:00 2013
@@ -28,8 +28,10 @@ import org.apache.hadoop.io.Writable;
  * HiveSerializer is used to serialize data to a Hadoop Writable object. The
  * serialize In addition to the interface below, all implementations are assume
  * to have a ctor that takes a single 'Table' object as argument.
- *
+ * All serializers should extend the abstract class AbstractSerializer, and eventually
+ * the Serializer interface should be removed
  */
+@Deprecated
 public interface Serializer {
 
   /**

Modified: hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/TypedSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/TypedSerDe.java?rev=1463091&r1=1463090&r2=1463091&view=diff
==============================================================================
--- hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/TypedSerDe.java (original)
+++ hive/branches/HIVE-4115/serde/src/java/org/apache/hadoop/hive/serde2/TypedSerDe.java Mon Apr  1 07:00:00 2013
@@ -33,7 +33,7 @@ import org.apache.hadoop.util.Reflection
  * TypedSerDe.
  *
  */
-public abstract class TypedSerDe implements SerDe {
+public abstract class TypedSerDe extends AbstractSerDe {
 
   protected Type objectType;
   protected Class<?> objectClass;
@@ -52,6 +52,7 @@ public abstract class TypedSerDe impleme
 
   protected Object deserializeCache;
 
+  @Override
   public Object deserialize(Writable blob) throws SerDeException {
     if (deserializeCache == null) {
       return ReflectionUtils.newInstance(objectClass, null);
@@ -61,6 +62,7 @@ public abstract class TypedSerDe impleme
     }
   }
 
+  @Override
   public ObjectInspector getObjectInspector() throws SerDeException {
     return ObjectInspectorFactory.getReflectionObjectInspector(objectType,
         getObjectInspectorOptions());
@@ -70,18 +72,22 @@ public abstract class TypedSerDe impleme
     return ObjectInspectorFactory.ObjectInspectorOptions.JAVA;
   }
 
+  @Override
   public void initialize(Configuration job, Properties tbl) throws SerDeException {
     // do nothing
   }
 
+  @Override
   public Class<? extends Writable> getSerializedClass() {
     return BytesWritable.class;
   }
 
+  @Override
   public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
     throw new RuntimeException("not supported");
   }
 
+  @Override
   public SerDeStats getSerDeStats() {
     // no support for statistics
     return null;