Posted to commits@hive.apache.org by nz...@apache.org on 2010/02/14 03:31:44 UTC

svn commit: r909965 [5/15] - in /hadoop/hive/trunk: ./ contrib/src/test/results/clientnegative/ contrib/src/test/results/clientpositive/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/test/results/clientpositive/

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join35.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join35.q.out?rev=909965&r1=909964&r2=909965&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join35.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join35.q.out Sun Feb 14 02:31:40 2010
@@ -84,41 +84,41 @@
                             type: bigint
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/test/data/warehouse/src [null-subquery1:subq1-subquery1:x]
+        file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/test/data/warehouse/src [null-subquery1:subq1-subquery1:x]
       Path -> Partition:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/test/data/warehouse/src 
+        file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/test/data/warehouse/src 
           Partition
             base file name: src
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              name src
+              bucket_count -1
+              columns key,value
               columns.types string:string
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/test/data/warehouse/src
+              name src
               serialization.ddl struct src { string key, string value}
               serialization.format 1
-              columns key,value
-              bucket_count -1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              file.inputformat org.apache.hadoop.mapred.TextInputFormat
-              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/test/data/warehouse/src
-              transient_lastDdlTime 1263595178
+              transient_lastDdlTime 1266042400
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                name src
+                bucket_count -1
+                columns key,value
                 columns.types string:string
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/test/data/warehouse/src
+                name src
                 serialization.ddl struct src { string key, string value}
                 serialization.format 1
-                columns key,value
-                bucket_count -1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                file.inputformat org.apache.hadoop.mapred.TextInputFormat
-                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/test/data/warehouse/src
-                transient_lastDdlTime 1263595178
+                transient_lastDdlTime 1266042400
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: src
             name: src
@@ -142,7 +142,7 @@
             File Output Operator
               compressed: false
               GlobalTableId: 0
-              directory: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1737821238/10002
+              directory: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10002
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -154,7 +154,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1737821238/10002 
+        file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10002 
           Union
             Common Join Operator
               condition map:
@@ -198,25 +198,25 @@
                     File Output Operator
                       compressed: false
                       GlobalTableId: 1
-                      directory: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1737821238/10003
+                      directory: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10003
                       table:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                           properties:
-                            name dest_j1
+                            bucket_count -1
+                            columns key,value,val2
                             columns.types string:string:int
+                            file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                            file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            location file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/test/data/warehouse/dest_j1
+                            name dest_j1
                             serialization.ddl struct dest_j1 { string key, string value, i32 val2}
                             serialization.format 1
-                            columns key,value,val2
-                            bucket_count -1
                             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                            file.inputformat org.apache.hadoop.mapred.TextInputFormat
-                            file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                            location file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/test/data/warehouse/dest_j1
-                            transient_lastDdlTime 1263595178
+                            transient_lastDdlTime 1266042401
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: dest_j1
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1737821238/10004 
+        file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10004 
           Union
             Common Join Operator
               condition map:
@@ -260,22 +260,22 @@
                     File Output Operator
                       compressed: false
                       GlobalTableId: 1
-                      directory: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1737821238/10003
+                      directory: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10003
                       table:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                           properties:
-                            name dest_j1
+                            bucket_count -1
+                            columns key,value,val2
                             columns.types string:string:int
+                            file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                            file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            location file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/test/data/warehouse/dest_j1
+                            name dest_j1
                             serialization.ddl struct dest_j1 { string key, string value, i32 val2}
                             serialization.format 1
-                            columns key,value,val2
-                            bucket_count -1
                             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                            file.inputformat org.apache.hadoop.mapred.TextInputFormat
-                            file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                            location file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/test/data/warehouse/dest_j1
-                            transient_lastDdlTime 1263595178
+                            transient_lastDdlTime 1266042401
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: dest_j1
       Local Work:
@@ -330,30 +330,30 @@
                         File Output Operator
                           compressed: false
                           GlobalTableId: 1
-                          directory: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1737821238/10003
+                          directory: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10003
                           table:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                               properties:
-                                name dest_j1
+                                bucket_count -1
+                                columns key,value,val2
                                 columns.types string:string:int
+                                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                                location file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/test/data/warehouse/dest_j1
+                                name dest_j1
                                 serialization.ddl struct dest_j1 { string key, string value, i32 val2}
                                 serialization.format 1
-                                columns key,value,val2
-                                bucket_count -1
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                                file.inputformat org.apache.hadoop.mapred.TextInputFormat
-                                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                                location file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/test/data/warehouse/dest_j1
-                                transient_lastDdlTime 1263595178
+                                transient_lastDdlTime 1266042401
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: dest_j1
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1737821238/10002 [file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1737821238/10002]
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1737821238/10004 [file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1737821238/10004]
+        file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10002 [file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10002]
+        file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10004 [file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10004]
       Path -> Partition:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1737821238/10002 
+        file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10002 
           Partition
             base file name: 10002
             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -369,7 +369,7 @@
                 columns _col0,_col1
                 columns.types string,bigint
                 escape.delim \
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1737821238/10004 
+        file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10004 
           Partition
             base file name: 10004
             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -393,37 +393,37 @@
     Move Operator
       files:
           hdfs directory: true
-          source: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1737821238/10003
-          destination: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/604913670/10000
+          source: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10003
+          destination: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10000
 
   Stage: Stage-0
     Move Operator
       tables:
           replace: true
-          source: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/604913670/10000
+          source: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10000
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                name dest_j1
+                bucket_count -1
+                columns key,value,val2
                 columns.types string:string:int
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/test/data/warehouse/dest_j1
+                name dest_j1
                 serialization.ddl struct dest_j1 { string key, string value, i32 val2}
                 serialization.format 1
-                columns key,value,val2
-                bucket_count -1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                file.inputformat org.apache.hadoop.mapred.TextInputFormat
-                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/test/data/warehouse/dest_j1
-                transient_lastDdlTime 1263595178
+                transient_lastDdlTime 1266042401
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: dest_j1
-          tmp directory: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/604913670/10001
+          tmp directory: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10001
 
   Stage: Stage-3
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1737821238/10003 
+        file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10003 
             Reduce Output Operator
               sort order: 
               Map-reduce partition columns:
@@ -439,41 +439,41 @@
                     type: int
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1737821238/10003 [file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1737821238/10003]
+        file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10003 [file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10003]
       Path -> Partition:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1737821238/10003 
+        file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10003 
           Partition
             base file name: 10003
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              name dest_j1
+              bucket_count -1
+              columns key,value,val2
               columns.types string:string:int
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/test/data/warehouse/dest_j1
+              name dest_j1
               serialization.ddl struct dest_j1 { string key, string value, i32 val2}
               serialization.format 1
-              columns key,value,val2
-              bucket_count -1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              file.inputformat org.apache.hadoop.mapred.TextInputFormat
-              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/test/data/warehouse/dest_j1
-              transient_lastDdlTime 1263595178
+              transient_lastDdlTime 1266042401
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                name dest_j1
+                bucket_count -1
+                columns key,value,val2
                 columns.types string:string:int
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/test/data/warehouse/dest_j1
+                name dest_j1
                 serialization.ddl struct dest_j1 { string key, string value, i32 val2}
                 serialization.format 1
-                columns key,value,val2
-                bucket_count -1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                file.inputformat org.apache.hadoop.mapred.TextInputFormat
-                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/test/data/warehouse/dest_j1
-                transient_lastDdlTime 1263595178
+                transient_lastDdlTime 1266042401
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: dest_j1
             name: dest_j1
@@ -482,22 +482,22 @@
           File Output Operator
             compressed: false
             GlobalTableId: 0
-            directory: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/604913670/10000
+            directory: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10000
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 properties:
-                  name dest_j1
+                  bucket_count -1
+                  columns key,value,val2
                   columns.types string:string:int
+                  file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                  file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  location file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/test/data/warehouse/dest_j1
+                  name dest_j1
                   serialization.ddl struct dest_j1 { string key, string value, i32 val2}
                   serialization.format 1
-                  columns key,value,val2
-                  bucket_count -1
                   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  file.inputformat org.apache.hadoop.mapred.TextInputFormat
-                  location file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/test/data/warehouse/dest_j1
-                  file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  transient_lastDdlTime 1263595178
+                  transient_lastDdlTime 1266042401
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest_j1
 
@@ -545,41 +545,41 @@
                             type: bigint
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/test/data/warehouse/src [null-subquery2:subq1-subquery2:x1]
+        file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/test/data/warehouse/src [null-subquery2:subq1-subquery2:x1]
       Path -> Partition:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/test/data/warehouse/src 
+        file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/test/data/warehouse/src 
           Partition
             base file name: src
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              name src
+              bucket_count -1
+              columns key,value
               columns.types string:string
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/test/data/warehouse/src
+              name src
               serialization.ddl struct src { string key, string value}
               serialization.format 1
-              columns key,value
-              bucket_count -1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              file.inputformat org.apache.hadoop.mapred.TextInputFormat
-              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/test/data/warehouse/src
-              transient_lastDdlTime 1263595178
+              transient_lastDdlTime 1266042400
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                name src
+                bucket_count -1
+                columns key,value
                 columns.types string:string
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/test/data/warehouse/src
+                name src
                 serialization.ddl struct src { string key, string value}
                 serialization.format 1
-                columns key,value
-                bucket_count -1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                file.inputformat org.apache.hadoop.mapred.TextInputFormat
-                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/test/data/warehouse/src
-                transient_lastDdlTime 1263595178
+                transient_lastDdlTime 1266042400
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: src
             name: src
@@ -603,7 +603,7 @@
             File Output Operator
               compressed: false
               GlobalTableId: 0
-              directory: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/1737821238/10004
+              directory: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-26-41_348_1323659698923150253/10004
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -640,11 +640,11 @@
 PREHOOK: query: select * from dest_j1 x order by x.key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest_j1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/942115762/10000
+PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-27-04_511_3697707095560966784/10000
 POSTHOOK: query: select * from dest_j1 x order by x.key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest_j1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/942115762/10000
+POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-27-04_511_3697707095560966784/10000
 128		3
 146	val_146	2
 150	val_150	1

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join40.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join40.q.out?rev=909965&r1=909964&r2=909965&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join40.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join40.q.out Sun Feb 14 02:31:40 2010
@@ -100,12 +100,12 @@
 FROM src x left outer JOIN (select * from src where key <= 100) y ON (x.key = y.key)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1249230133/10000
+PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-26-20_375_8739608897771687968/10000
 POSTHOOK: query: SELECT x.key, x.value, y.key, y.value
 FROM src x left outer JOIN (select * from src where key <= 100) y ON (x.key = y.key)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1249230133/10000
+POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-26-20_375_8739608897771687968/10000
 0	val_0	0	val_0
 0	val_0	0	val_0
 0	val_0	0	val_0
@@ -689,9 +689,9 @@
   Stage: Stage-1
     Map Reduce
       Alias -> Map Operator Tree:
-        src2 
+        src1 
           TableScan
-            alias: src2
+            alias: src1
             Reduce Output Operator
               key expressions:
                     expr: key
@@ -700,13 +700,13 @@
               Map-reduce partition columns:
                     expr: key
                     type: string
-              tag: 1
+              tag: 0
               value expressions:
-                    expr: value
+                    expr: key
                     type: string
-        src1 
+        src2 
           TableScan
-            alias: src1
+            alias: src2
             Reduce Output Operator
               key expressions:
                     expr: key
@@ -715,9 +715,9 @@
               Map-reduce partition columns:
                     expr: key
                     type: string
-              tag: 0
+              tag: 1
               value expressions:
-                    expr: key
+                    expr: value
                     type: string
       Reduce Operator Tree:
         Join Operator
@@ -751,12 +751,12 @@
 FROM src src1 JOIN src src2 ON (src1.key = src2.key)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1169404066/10000
+PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-26-28_099_8963486530334152093/10000
 POSTHOOK: query: select src1.key, src2.value 
 FROM src src1 JOIN src src2 ON (src1.key = src2.key)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1169404066/10000
+POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-26-28_099_8963486530334152093/10000
 0	val_0
 0	val_0
 0	val_0
@@ -1805,6 +1805,31 @@
   Stage: Stage-1
     Map Reduce
       Alias -> Map Operator Tree:
+        src1 
+          TableScan
+            alias: src1
+            Filter Operator
+              predicate:
+                  expr: (key < 10)
+                  type: boolean
+              Filter Operator
+                predicate:
+                    expr: (key < 10)
+                    type: boolean
+                Reduce Output Operator
+                  key expressions:
+                        expr: key
+                        type: string
+                  sort order: +
+                  Map-reduce partition columns:
+                        expr: key
+                        type: string
+                  tag: 0
+                  value expressions:
+                        expr: key
+                        type: string
+                        expr: value
+                        type: string
         src2 
           TableScan
             alias: src2
@@ -1847,31 +1872,6 @@
                         type: string
                         expr: value
                         type: string
-        src1 
-          TableScan
-            alias: src1
-            Filter Operator
-              predicate:
-                  expr: (key < 10)
-                  type: boolean
-              Filter Operator
-                predicate:
-                    expr: (key < 10)
-                    type: boolean
-                Reduce Output Operator
-                  key expressions:
-                        expr: key
-                        type: string
-                  sort order: +
-                  Map-reduce partition columns:
-                        expr: key
-                        type: string
-                  tag: 0
-                  value expressions:
-                        expr: key
-                        type: string
-                        expr: value
-                        type: string
       Reduce Operator Tree:
         Join Operator
           condition map:
@@ -1908,7 +1908,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1836147411/10002 
+        file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-26-35_615_2133554865043548038/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -1956,12 +1956,12 @@
 SORT BY src1.key, src1.value, src2.key, src2.value, src3.key, src3.value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/561269019/10000
+PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-26-35_711_5635391210022763694/10000
 POSTHOOK: query: SELECT * FROM src src1 JOIN src src2 ON (src1.key = src2.key AND src1.key < 10) RIGHT OUTER JOIN src src3 ON (src1.key = src3.key AND src3.key < 20)
 SORT BY src1.key, src1.value, src2.key, src2.value, src3.key, src3.value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/561269019/10000
+POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-26-35_711_5635391210022763694/10000
 NULL	NULL	NULL	NULL	10	val_10
 NULL	NULL	NULL	NULL	11	val_11
 NULL	NULL	NULL	NULL	12	val_12
@@ -2050,16 +2050,16 @@
   Stage: Stage-1
     Map Reduce
       Alias -> Map Operator Tree:
-        src2 
+        src1 
           TableScan
-            alias: src2
+            alias: src1
             Filter Operator
               predicate:
-                  expr: (key < 15)
+                  expr: (key < 10)
                   type: boolean
               Filter Operator
                 predicate:
-                    expr: (key < 15)
+                    expr: (key < 10)
                     type: boolean
                 Reduce Output Operator
                   key expressions:
@@ -2069,22 +2069,22 @@
                   Map-reduce partition columns:
                         expr: key
                         type: string
-                  tag: 1
+                  tag: 0
                   value expressions:
                         expr: key
                         type: string
                         expr: value
                         type: string
-        src3 
+        src2 
           TableScan
-            alias: src3
+            alias: src2
             Filter Operator
               predicate:
-                  expr: (key < 20)
+                  expr: (key < 15)
                   type: boolean
               Filter Operator
                 predicate:
-                    expr: (key < 20)
+                    expr: (key < 15)
                     type: boolean
                 Reduce Output Operator
                   key expressions:
@@ -2094,22 +2094,22 @@
                   Map-reduce partition columns:
                         expr: key
                         type: string
-                  tag: 2
+                  tag: 1
                   value expressions:
                         expr: key
                         type: string
                         expr: value
                         type: string
-        src1 
+        src3 
           TableScan
-            alias: src1
+            alias: src3
             Filter Operator
               predicate:
-                  expr: (key < 10)
+                  expr: (key < 20)
                   type: boolean
               Filter Operator
                 predicate:
-                    expr: (key < 10)
+                    expr: (key < 20)
                     type: boolean
                 Reduce Output Operator
                   key expressions:
@@ -2119,7 +2119,7 @@
                   Map-reduce partition columns:
                         expr: key
                         type: string
-                  tag: 0
+                  tag: 2
                   value expressions:
                         expr: key
                         type: string
@@ -2161,7 +2161,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1889892039/10002 
+        file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-26-47_526_7396170302427973294/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -2209,12 +2209,12 @@
 SORT BY src1.key, src1.value, src2.key, src2.value, src3.key, src3.value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1597568732/10000
+PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-26-47_626_1855366263501057105/10000
 POSTHOOK: query: SELECT * FROM src src1 JOIN src src2 ON (src1.key = src2.key AND src1.key < 10 AND src2.key < 15) RIGHT OUTER JOIN src src3 ON (src1.key = src3.key AND src3.key < 20)
 SORT BY src1.key, src1.value, src2.key, src2.value, src3.key, src3.value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1597568732/10000
+POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-26-47_626_1855366263501057105/10000
 NULL	NULL	NULL	NULL	10	val_10
 NULL	NULL	NULL	NULL	11	val_11
 NULL	NULL	NULL	NULL	12	val_12
@@ -2420,12 +2420,12 @@
 FROM src x left outer JOIN (select * from src where key <= 100) y ON (x.key = y.key)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/302774741/10000
+PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-26-58_663_5257751854137888714/10000
 POSTHOOK: query: SELECT /*+ MAPJOIN(y) */ x.key, x.value, y.key, y.value
 FROM src x left outer JOIN (select * from src where key <= 100) y ON (x.key = y.key)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/302774741/10000
+POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-26-58_663_5257751854137888714/10000
 238	val_238	NULL	NULL
 86	val_86	86	val_86
 311	val_311	NULL	NULL
@@ -3010,9 +3010,9 @@
   Stage: Stage-1
     Map Reduce
       Alias -> Map Operator Tree:
-        b 
+        a 
           TableScan
-            alias: b
+            alias: a
             Reduce Output Operator
               key expressions:
                     expr: key
@@ -3021,10 +3021,10 @@
               Map-reduce partition columns:
                     expr: key
                     type: string
-              tag: 1
-        a 
+              tag: 0
+        b 
           TableScan
-            alias: a
+            alias: b
             Reduce Output Operator
               key expressions:
                     expr: key
@@ -3033,7 +3033,7 @@
               Map-reduce partition columns:
                     expr: key
                     type: string
-              tag: 0
+              tag: 1
       Reduce Operator Tree:
         Join Operator
           condition map:
@@ -3059,7 +3059,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1375711979/10002 
+        file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-27-07_552_3382271159548134599/10002 
             Reduce Output Operator
               sort order: 
               tag: -1
@@ -3093,9 +3093,9 @@
 PREHOOK: query: SELECT COUNT(1) FROM SRC A JOIN SRC B ON (A.KEY=B.KEY)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/991321663/10000
+PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-27-07_618_4569452327751615405/10000
 POSTHOOK: query: SELECT COUNT(1) FROM SRC A JOIN SRC B ON (A.KEY=B.KEY)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/991321663/10000
+POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-27-07_618_4569452327751615405/10000
 1028

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join9.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join9.q.out?rev=909965&r1=909964&r2=909965&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join9.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join9.q.out Sun Feb 14 02:31:40 2010
@@ -22,21 +22,6 @@
   Stage: Stage-1
     Map Reduce
       Alias -> Map Operator Tree:
-        src2 
-          TableScan
-            alias: src2
-            Reduce Output Operator
-              key expressions:
-                    expr: key
-                    type: string
-              sort order: +
-              Map-reduce partition columns:
-                    expr: key
-                    type: string
-              tag: 1
-              value expressions:
-                    expr: value
-                    type: string
         src1 
           TableScan
             alias: src1
@@ -61,48 +46,63 @@
                       type: string
                       expr: hr
                       type: string
+        src2 
+          TableScan
+            alias: src2
+            Reduce Output Operator
+              key expressions:
+                    expr: key
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: key
+                    type: string
+              tag: 1
+              value expressions:
+                    expr: value
+                    type: string
       Needs Tagging: true
       Path -> Alias:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/test/data/warehouse/src [src2]
-        file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [src1]
+        file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/test/data/warehouse/src [src2]
+        file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [src1]
       Path -> Partition:
-        file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/test/data/warehouse/src 
+        file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/test/data/warehouse/src 
           Partition
             base file name: src
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              name src
+              bucket_count -1
+              columns key,value
               columns.types string:string
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/test/data/warehouse/src
+              name src
               serialization.ddl struct src { string key, string value}
               serialization.format 1
-              columns key,value
-              bucket_count -1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              file.inputformat org.apache.hadoop.mapred.TextInputFormat
-              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/test/data/warehouse/src
-              transient_lastDdlTime 1263595040
+              transient_lastDdlTime 1266042448
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                name src
+                bucket_count -1
+                columns key,value
                 columns.types string:string
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/test/data/warehouse/src
+                name src
                 serialization.ddl struct src { string key, string value}
                 serialization.format 1
-                columns key,value
-                bucket_count -1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                file.inputformat org.apache.hadoop.mapred.TextInputFormat
-                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/test/data/warehouse/src
-                transient_lastDdlTime 1263595040
+                transient_lastDdlTime 1266042448
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: src
             name: src
-        file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
           Partition
             base file name: hr=12
             input format: org.apache.hadoop.mapred.TextInputFormat
@@ -111,35 +111,35 @@
               ds 2008-04-08
               hr 12
             properties:
-              name srcpart
+              bucket_count -1
+              columns key,value
               columns.types string:string
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/test/data/warehouse/srcpart
+              name srcpart
+              partition_columns ds/hr
               serialization.ddl struct srcpart { string key, string value}
               serialization.format 1
-              columns key,value
-              partition_columns ds/hr
-              bucket_count -1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              file.inputformat org.apache.hadoop.mapred.TextInputFormat
-              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/test/data/warehouse/srcpart
-              transient_lastDdlTime 1263595039
+              transient_lastDdlTime 1266042447
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                name srcpart
+                bucket_count -1
+                columns key,value
                 columns.types string:string
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/test/data/warehouse/srcpart
+                name srcpart
+                partition_columns ds/hr
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
-                columns key,value
-                partition_columns ds/hr
-                bucket_count -1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                file.inputformat org.apache.hadoop.mapred.TextInputFormat
-                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/test/data/warehouse/srcpart
-                transient_lastDdlTime 1263595039
+                transient_lastDdlTime 1266042447
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
             name: srcpart
@@ -174,22 +174,22 @@
                 File Output Operator
                   compressed: false
                   GlobalTableId: 1
-                  directory: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1485975294/10000
+                  directory: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-27-29_246_1645987373446132683/10000
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
-                        name dest1
+                        bucket_count -1
+                        columns key,value
                         columns.types int:string
+                        file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                        file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        location file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/test/data/warehouse/dest1
+                        name dest1
                         serialization.ddl struct dest1 { i32 key, string value}
                         serialization.format 1
-                        columns key,value
-                        bucket_count -1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        file.inputformat org.apache.hadoop.mapred.TextInputFormat
-                        file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        location file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/test/data/warehouse/dest1
-                        transient_lastDdlTime 1263595040
+                        transient_lastDdlTime 1266042449
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: dest1
 
@@ -197,25 +197,25 @@
     Move Operator
       tables:
           replace: true
-          source: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1485975294/10000
+          source: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-27-29_246_1645987373446132683/10000
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                name dest1
+                bucket_count -1
+                columns key,value
                 columns.types int:string
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/test/data/warehouse/dest1
+                name dest1
                 serialization.ddl struct dest1 { i32 key, string value}
                 serialization.format 1
-                columns key,value
-                bucket_count -1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                file.inputformat org.apache.hadoop.mapred.TextInputFormat
-                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/test/data/warehouse/dest1
-                transient_lastDdlTime 1263595040
+                transient_lastDdlTime 1266042449
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: dest1
-          tmp directory: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1485975294/10001
+          tmp directory: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-27-29_246_1645987373446132683/10001
 
 
 PREHOOK: query: FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key)
@@ -233,11 +233,11 @@
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1050075230/10000
+PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-27-36_266_8910979444835173818/10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_0/build/ql/tmp/1050075230/10000
+POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_1/build/ql/scratchdir/hive_2010-02-12_22-27-36_266_8910979444835173818/10000
 0	val_0
 0	val_0
 0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join_hive_626.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join_hive_626.q.out?rev=909965&r1=909964&r2=909965&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join_hive_626.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join_hive_626.q.out Sun Feb 14 02:31:40 2010
@@ -70,9 +70,9 @@
   Stage: Stage-1
     Map Reduce
       Alias -> Map Operator Tree:
-        hive_foo 
+        hive_bar 
           TableScan
-            alias: hive_foo
+            alias: hive_bar
             Reduce Output Operator
               key expressions:
                     expr: foo_id
@@ -81,13 +81,15 @@
               Map-reduce partition columns:
                     expr: foo_id
                     type: int
-              tag: 0
+              tag: 1
               value expressions:
-                    expr: foo_name
+                    expr: bar_id
+                    type: int
+                    expr: bar_name
                     type: string
-        hive_bar 
+        hive_foo 
           TableScan
-            alias: hive_bar
+            alias: hive_foo
             Reduce Output Operator
               key expressions:
                     expr: foo_id
@@ -96,11 +98,9 @@
               Map-reduce partition columns:
                     expr: foo_id
                     type: int
-              tag: 1
+              tag: 0
               value expressions:
-                    expr: bar_id
-                    type: int
-                    expr: bar_name
+                    expr: foo_name
                     type: string
       Reduce Operator Tree:
         Join Operator
@@ -187,14 +187,14 @@
 PREHOOK: Input: default@hive_foo
 PREHOOK: Input: default@hive_count
 PREHOOK: Input: default@hive_bar
-PREHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/462225223/10000
+PREHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-27-47_249_881439767445940507/10000
 POSTHOOK: query: select hive_foo.foo_name, hive_bar.bar_name, n from hive_foo join hive_bar on hive_foo.foo_id =
 hive_bar.foo_id join hive_count on hive_count.bar_id = hive_bar.bar_id
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hive_foo
 POSTHOOK: Input: default@hive_count
 POSTHOOK: Input: default@hive_bar
-POSTHOOK: Output: file:/data/users/heyongqiang/hive-trunk/.ptest_1/build/ql/tmp/462225223/10000
+POSTHOOK: Output: file:/data/users/zshao/hadoop_hive_trunk2/.ptest_2/build/ql/scratchdir/hive_2010-02-12_22-27-47_249_881439767445940507/10000
 foo1	bar10	2
 PREHOOK: query: drop table hive_foo
 PREHOOK: type: DROPTABLE