Posted to commits@hive.apache.org by he...@apache.org on 2010/02/19 01:58:31 UTC

svn commit: r911664 [2/15] - in /hadoop/hive/trunk: ./ common/src/java/org/apache/hadoop/hive/conf/ conf/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/io/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/java/org...

Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/bucket2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/bucket2.q.out?rev=911664&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/bucket2.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/bucket2.q.out Fri Feb 19 00:58:28 2010
@@ -0,0 +1,480 @@
+PREHOOK: query: drop table bucket2_1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table bucket2_1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE bucket2_1(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE bucket2_1(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@bucket2_1
+PREHOOK: query: explain extended
+insert overwrite table bucket2_1
+select * from src
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+insert overwrite table bucket2_1
+select * from src
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB bucket2_1)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+              outputColumnNames: _col0, _col1
+              Reduce Output Operator
+                sort order: 
+                Map-reduce partition columns:
+                      expr: _col0
+                      type: string
+                tag: -1
+                value expressions:
+                      expr: _col0
+                      type: string
+                      expr: _col1
+                      type: string
+      Needs Tagging: false
+      Path -> Alias:
+        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src [src]
+      Path -> Partition:
+        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src 
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.types string:string
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+              name src
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transient_lastDdlTime 1266535991
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.types string:string
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+                name src
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                transient_lastDdlTime 1266535991
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: src
+            name: src
+      Reduce Operator Tree:
+        Extract
+          Select Operator
+            expressions:
+                  expr: UDFToInteger(_col0)
+                  type: int
+                  expr: _col1
+                  type: string
+            outputColumnNames: _col0, _col1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-18_15-33-12_469_4081195671679736172/10000
+              NumFilesPerFileSink: 2
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    bucket_count 2
+                    bucket_field_name key
+                    columns key,value
+                    columns.types int:string
+                    file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                    file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucket2_1
+                    name bucket2_1
+                    serialization.ddl struct bucket2_1 { i32 key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    transient_lastDdlTime 1266535992
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: bucket2_1
+              TotalFiles: 2
+              MultiFileSpray: true
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-18_15-33-12_469_4081195671679736172/10000
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count 2
+                bucket_field_name key
+                columns key,value
+                columns.types int:string
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucket2_1
+                name bucket2_1
+                serialization.ddl struct bucket2_1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                transient_lastDdlTime 1266535992
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: bucket2_1
+          tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-18_15-33-12_469_4081195671679736172/10001
+
+
+PREHOOK: query: insert overwrite table bucket2_1
+select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@bucket2_1
+POSTHOOK: query: insert overwrite table bucket2_1
+select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@bucket2_1
+PREHOOK: query: explain
+select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF bucket2_1 (TOK_TABLESAMPLE 1 2) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        s 
+          TableScan
+            alias: s
+            Filter Operator
+              predicate:
+                  expr: (((hash(key) & 2147483647) % 2) = 0)
+                  type: boolean
+              Filter Operator
+                predicate:
+                    expr: (((hash(key) & 2147483647) % 2) = 0)
+                    type: boolean
+                Select Operator
+                  expressions:
+                        expr: key
+                        type: int
+                        expr: value
+                        type: string
+                  outputColumnNames: _col0, _col1
+                  Reduce Output Operator
+                    key expressions:
+                          expr: _col0
+                          type: int
+                    sort order: +
+                    tag: -1
+                    value expressions:
+                          expr: _col0
+                          type: int
+                          expr: _col1
+                          type: string
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@bucket2_1
+PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-18_15-33-17_502_8138633127834770024/10000
+POSTHOOK: query: select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@bucket2_1
+POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-18_15-33-17_502_8138633127834770024/10000
+0	val_0
+0	val_0
+0	val_0
+2	val_2
+4	val_4
+8	val_8
+10	val_10
+12	val_12
+12	val_12
+18	val_18
+18	val_18
+20	val_20
+24	val_24
+24	val_24
+26	val_26
+26	val_26
+28	val_28
+30	val_30
+34	val_34
+42	val_42
+42	val_42
+44	val_44
+54	val_54
+58	val_58
+58	val_58
+64	val_64
+66	val_66
+70	val_70
+70	val_70
+70	val_70
+72	val_72
+72	val_72
+74	val_74
+76	val_76
+76	val_76
+78	val_78
+80	val_80
+82	val_82
+84	val_84
+84	val_84
+86	val_86
+90	val_90
+90	val_90
+90	val_90
+92	val_92
+96	val_96
+98	val_98
+98	val_98
+100	val_100
+100	val_100
+104	val_104
+104	val_104
+114	val_114
+116	val_116
+118	val_118
+118	val_118
+120	val_120
+120	val_120
+126	val_126
+128	val_128
+128	val_128
+128	val_128
+134	val_134
+134	val_134
+136	val_136
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+146	val_146
+146	val_146
+150	val_150
+152	val_152
+152	val_152
+156	val_156
+158	val_158
+160	val_160
+162	val_162
+164	val_164
+164	val_164
+166	val_166
+168	val_168
+170	val_170
+172	val_172
+172	val_172
+174	val_174
+174	val_174
+176	val_176
+176	val_176
+178	val_178
+180	val_180
+186	val_186
+190	val_190
+192	val_192
+194	val_194
+196	val_196
+200	val_200
+200	val_200
+202	val_202
+208	val_208
+208	val_208
+208	val_208
+214	val_214
+216	val_216
+216	val_216
+218	val_218
+222	val_222
+224	val_224
+224	val_224
+226	val_226
+228	val_228
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+238	val_238
+238	val_238
+242	val_242
+242	val_242
+244	val_244
+248	val_248
+252	val_252
+256	val_256
+256	val_256
+258	val_258
+260	val_260
+262	val_262
+266	val_266
+272	val_272
+272	val_272
+274	val_274
+278	val_278
+278	val_278
+280	val_280
+280	val_280
+282	val_282
+282	val_282
+284	val_284
+286	val_286
+288	val_288
+288	val_288
+292	val_292
+296	val_296
+298	val_298
+298	val_298
+298	val_298
+302	val_302
+306	val_306
+308	val_308
+310	val_310
+316	val_316
+316	val_316
+316	val_316
+318	val_318
+318	val_318
+318	val_318
+322	val_322
+322	val_322
+332	val_332
+336	val_336
+338	val_338
+342	val_342
+342	val_342
+344	val_344
+344	val_344
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+356	val_356
+360	val_360
+362	val_362
+364	val_364
+366	val_366
+368	val_368
+374	val_374
+378	val_378
+382	val_382
+382	val_382
+384	val_384
+384	val_384
+384	val_384
+386	val_386
+392	val_392
+394	val_394
+396	val_396
+396	val_396
+396	val_396
+400	val_400
+402	val_402
+404	val_404
+404	val_404
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+414	val_414
+414	val_414
+418	val_418
+424	val_424
+424	val_424
+430	val_430
+430	val_430
+430	val_430
+432	val_432
+436	val_436
+438	val_438
+438	val_438
+438	val_438
+444	val_444
+446	val_446
+448	val_448
+452	val_452
+454	val_454
+454	val_454
+454	val_454
+458	val_458
+458	val_458
+460	val_460
+462	val_462
+462	val_462
+466	val_466
+466	val_466
+466	val_466
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+470	val_470
+472	val_472
+478	val_478
+478	val_478
+480	val_480
+480	val_480
+480	val_480
+482	val_482
+484	val_484
+490	val_490
+492	val_492
+492	val_492
+494	val_494
+496	val_496
+498	val_498
+498	val_498
+498	val_498
+PREHOOK: query: drop table bucket2_1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table bucket2_1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@bucket2_1

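A side note on the sampling plan above: the Filter Operator keeps rows where (((hash(key) & 2147483647) % 2) = 0), which is how TABLESAMPLE (BUCKET 1 OUT OF 2) selects the first of the table's two buckets. The following is only a minimal, hypothetical Java sketch of that predicate, not Hive's actual code; it assumes hash(key) for an int key is just the key value, which is consistent with the even-only keys in the result set above.

import java.util.ArrayList;
import java.util.List;

public class BucketSampleSketch {

    // bucket is 1-based, as in TABLESAMPLE (BUCKET 1 OUT OF 2).
    static boolean inBucket(int keyHash, int bucket, int numBuckets) {
        return ((keyHash & Integer.MAX_VALUE) % numBuckets) == (bucket - 1);
    }

    public static void main(String[] args) {
        int[] keys = {0, 2, 5, 10, 27, 86};
        List<Integer> sampled = new ArrayList<>();
        for (int k : keys) {
            // Assumption: hash(int key) == key, matching the even-only output above.
            if (inBucket(k, 1, 2)) {
                sampled.add(k);
            }
        }
        System.out.println(sampled);  // [0, 2, 10, 86] -- only even keys pass the filter
    }
}
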
Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/bucket3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/bucket3.q.out?rev=911664&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/bucket3.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/bucket3.q.out Fri Feb 19 00:58:28 2010
@@ -0,0 +1,502 @@
+PREHOOK: query: drop table bucket3_1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table bucket3_1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE bucket3_1(key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE bucket3_1(key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@bucket3_1
+PREHOOK: query: explain extended
+insert overwrite table bucket3_1 partition (ds='1')
+select * from src
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+insert overwrite table bucket3_1 partition (ds='1')
+select * from src
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB bucket3_1 (TOK_PARTSPEC (TOK_PARTVAL ds '1')))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+              outputColumnNames: _col0, _col1
+              Reduce Output Operator
+                sort order: 
+                Map-reduce partition columns:
+                      expr: _col0
+                      type: string
+                tag: -1
+                value expressions:
+                      expr: _col0
+                      type: string
+                      expr: _col1
+                      type: string
+      Needs Tagging: false
+      Path -> Alias:
+        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src [src]
+      Path -> Partition:
+        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src 
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.types string:string
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+              name src
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transient_lastDdlTime 1266536485
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.types string:string
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/src
+                name src
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                transient_lastDdlTime 1266536485
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: src
+            name: src
+      Reduce Operator Tree:
+        Extract
+          Select Operator
+            expressions:
+                  expr: UDFToInteger(_col0)
+                  type: int
+                  expr: _col1
+                  type: string
+            outputColumnNames: _col0, _col1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-18_15-41-26_502_8846263313357969996/10000
+              NumFilesPerFileSink: 2
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    bucket_count 2
+                    bucket_field_name key
+                    columns key,value
+                    columns.types int:string
+                    file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                    file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucket3_1
+                    name bucket3_1
+                    partition_columns ds
+                    serialization.ddl struct bucket3_1 { i32 key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    transient_lastDdlTime 1266536486
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: bucket3_1
+              TotalFiles: 2
+              MultiFileSpray: true
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 1
+          replace: true
+          source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-18_15-41-26_502_8846263313357969996/10000
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count 2
+                bucket_field_name key
+                columns key,value
+                columns.types int:string
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucket3_1
+                name bucket3_1
+                partition_columns ds
+                serialization.ddl struct bucket3_1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                transient_lastDdlTime 1266536486
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: bucket3_1
+          tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-18_15-41-26_502_8846263313357969996/10001
+
+
+PREHOOK: query: insert overwrite table bucket3_1 partition (ds='1')
+select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@bucket3_1@ds=1
+POSTHOOK: query: insert overwrite table bucket3_1 partition (ds='1')
+select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@bucket3_1@ds=1
+PREHOOK: query: insert overwrite table bucket3_1 partition (ds='2')
+select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@bucket3_1@ds=2
+POSTHOOK: query: insert overwrite table bucket3_1 partition (ds='2')
+select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@bucket3_1@ds=2
+PREHOOK: query: explain
+select * from bucket3_1 tablesample (bucket 1 out of 2) s where ds = '1' order by key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from bucket3_1 tablesample (bucket 1 out of 2) s where ds = '1' order by key
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF bucket3_1 (TOK_TABLESAMPLE 1 2) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '1')) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        s 
+          TableScan
+            alias: s
+            Filter Operator
+              predicate:
+                  expr: ((((hash(key) & 2147483647) % 2) = 0) and (ds = '1'))
+                  type: boolean
+              Filter Operator
+                predicate:
+                    expr: (((hash(key) & 2147483647) % 2) = 0)
+                    type: boolean
+                Filter Operator
+                  predicate:
+                      expr: (ds = '1')
+                      type: boolean
+                  Select Operator
+                    expressions:
+                          expr: key
+                          type: int
+                          expr: value
+                          type: string
+                          expr: ds
+                          type: string
+                    outputColumnNames: _col0, _col1, _col2
+                    Reduce Output Operator
+                      key expressions:
+                            expr: _col0
+                            type: int
+                      sort order: +
+                      tag: -1
+                      value expressions:
+                            expr: _col0
+                            type: int
+                            expr: _col1
+                            type: string
+                            expr: _col2
+                            type: string
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: select * from bucket3_1 tablesample (bucket 1 out of 2) s where ds = '1' order by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@bucket3_1@ds=1
+PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-18_15-41-35_350_1043920384757897875/10000
+POSTHOOK: query: select * from bucket3_1 tablesample (bucket 1 out of 2) s where ds = '1' order by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@bucket3_1@ds=1
+POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-18_15-41-35_350_1043920384757897875/10000
+0	val_0	1
+0	val_0	1
+0	val_0	1
+2	val_2	1
+4	val_4	1
+8	val_8	1
+10	val_10	1
+12	val_12	1
+12	val_12	1
+18	val_18	1
+18	val_18	1
+20	val_20	1
+24	val_24	1
+24	val_24	1
+26	val_26	1
+26	val_26	1
+28	val_28	1
+30	val_30	1
+34	val_34	1
+42	val_42	1
+42	val_42	1
+44	val_44	1
+54	val_54	1
+58	val_58	1
+58	val_58	1
+64	val_64	1
+66	val_66	1
+70	val_70	1
+70	val_70	1
+70	val_70	1
+72	val_72	1
+72	val_72	1
+74	val_74	1
+76	val_76	1
+76	val_76	1
+78	val_78	1
+80	val_80	1
+82	val_82	1
+84	val_84	1
+84	val_84	1
+86	val_86	1
+90	val_90	1
+90	val_90	1
+90	val_90	1
+92	val_92	1
+96	val_96	1
+98	val_98	1
+98	val_98	1
+100	val_100	1
+100	val_100	1
+104	val_104	1
+104	val_104	1
+114	val_114	1
+116	val_116	1
+118	val_118	1
+118	val_118	1
+120	val_120	1
+120	val_120	1
+126	val_126	1
+128	val_128	1
+128	val_128	1
+128	val_128	1
+134	val_134	1
+134	val_134	1
+136	val_136	1
+138	val_138	1
+138	val_138	1
+138	val_138	1
+138	val_138	1
+146	val_146	1
+146	val_146	1
+150	val_150	1
+152	val_152	1
+152	val_152	1
+156	val_156	1
+158	val_158	1
+160	val_160	1
+162	val_162	1
+164	val_164	1
+164	val_164	1
+166	val_166	1
+168	val_168	1
+170	val_170	1
+172	val_172	1
+172	val_172	1
+174	val_174	1
+174	val_174	1
+176	val_176	1
+176	val_176	1
+178	val_178	1
+180	val_180	1
+186	val_186	1
+190	val_190	1
+192	val_192	1
+194	val_194	1
+196	val_196	1
+200	val_200	1
+200	val_200	1
+202	val_202	1
+208	val_208	1
+208	val_208	1
+208	val_208	1
+214	val_214	1
+216	val_216	1
+216	val_216	1
+218	val_218	1
+222	val_222	1
+224	val_224	1
+224	val_224	1
+226	val_226	1
+228	val_228	1
+230	val_230	1
+230	val_230	1
+230	val_230	1
+230	val_230	1
+230	val_230	1
+238	val_238	1
+238	val_238	1
+242	val_242	1
+242	val_242	1
+244	val_244	1
+248	val_248	1
+252	val_252	1
+256	val_256	1
+256	val_256	1
+258	val_258	1
+260	val_260	1
+262	val_262	1
+266	val_266	1
+272	val_272	1
+272	val_272	1
+274	val_274	1
+278	val_278	1
+278	val_278	1
+280	val_280	1
+280	val_280	1
+282	val_282	1
+282	val_282	1
+284	val_284	1
+286	val_286	1
+288	val_288	1
+288	val_288	1
+292	val_292	1
+296	val_296	1
+298	val_298	1
+298	val_298	1
+298	val_298	1
+302	val_302	1
+306	val_306	1
+308	val_308	1
+310	val_310	1
+316	val_316	1
+316	val_316	1
+316	val_316	1
+318	val_318	1
+318	val_318	1
+318	val_318	1
+322	val_322	1
+322	val_322	1
+332	val_332	1
+336	val_336	1
+338	val_338	1
+342	val_342	1
+342	val_342	1
+344	val_344	1
+344	val_344	1
+348	val_348	1
+348	val_348	1
+348	val_348	1
+348	val_348	1
+348	val_348	1
+356	val_356	1
+360	val_360	1
+362	val_362	1
+364	val_364	1
+366	val_366	1
+368	val_368	1
+374	val_374	1
+378	val_378	1
+382	val_382	1
+382	val_382	1
+384	val_384	1
+384	val_384	1
+384	val_384	1
+386	val_386	1
+392	val_392	1
+394	val_394	1
+396	val_396	1
+396	val_396	1
+396	val_396	1
+400	val_400	1
+402	val_402	1
+404	val_404	1
+404	val_404	1
+406	val_406	1
+406	val_406	1
+406	val_406	1
+406	val_406	1
+414	val_414	1
+414	val_414	1
+418	val_418	1
+424	val_424	1
+424	val_424	1
+430	val_430	1
+430	val_430	1
+430	val_430	1
+432	val_432	1
+436	val_436	1
+438	val_438	1
+438	val_438	1
+438	val_438	1
+444	val_444	1
+446	val_446	1
+448	val_448	1
+452	val_452	1
+454	val_454	1
+454	val_454	1
+454	val_454	1
+458	val_458	1
+458	val_458	1
+460	val_460	1
+462	val_462	1
+462	val_462	1
+466	val_466	1
+466	val_466	1
+466	val_466	1
+468	val_468	1
+468	val_468	1
+468	val_468	1
+468	val_468	1
+470	val_470	1
+472	val_472	1
+478	val_478	1
+478	val_478	1
+480	val_480	1
+480	val_480	1
+480	val_480	1
+482	val_482	1
+484	val_484	1
+490	val_490	1
+492	val_492	1
+492	val_492	1
+494	val_494	1
+496	val_496	1
+498	val_498	1
+498	val_498	1
+498	val_498	1
+PREHOOK: query: drop table bucket3_1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table bucket3_1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@bucket3_1

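The insert plans above show NumFilesPerFileSink: 2, TotalFiles: 2 and MultiFileSpray: true for tables created with CLUSTERED BY (key) INTO 2 BUCKETS: the file sink sprays each row into one of two bucket files, picked by the same hash-and-modulo on the bucket column. Below is a rough, hypothetical sketch of that routing only (again assuming hash of an int is the value itself); the real File Output Operator is more involved.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class MultiFileSpraySketch {
    public static void main(String[] args) {
        int numBuckets = 2;  // matches CLUSTERED BY (key) INTO 2 BUCKETS
        Map<Integer, List<String>> bucketFiles = new HashMap<>();
        for (int b = 0; b < numBuckets; b++) {
            bucketFiles.put(b, new ArrayList<>());
        }
        String[][] rows = {{"0", "val_0"}, {"5", "val_5"}, {"27", "val_27"}, {"86", "val_86"}};
        for (String[] row : rows) {
            int keyHash = Integer.parseInt(row[0]);  // assumption: hash(int) == the value
            int bucket = (keyHash & Integer.MAX_VALUE) % numBuckets;
            bucketFiles.get(bucket).add(row[0] + "\t" + row[1]);
        }
        // Bucket 0 collects the even keys, bucket 1 the odd ones; TABLESAMPLE (BUCKET 1
        // OUT OF 2) above reads only the first file, hence the even-only result set.
        System.out.println(bucketFiles);
    }
}
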
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/bucketmapjoin1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/bucketmapjoin1.q.out?rev=911664&r1=911663&r2=911664&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/bucketmapjoin1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/bucketmapjoin1.q.out Fri Feb 19 00:58:28 2010
@@ -127,7 +127,8 @@
                     File Output Operator
                       compressed: false
                       GlobalTableId: 1
-                      directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-33_238_8713741927786752274/10002
+                      directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-32_403_4982018782937064172/10002
+                      NumFilesPerFileSink: 1
                       table:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -137,14 +138,16 @@
                             columns.types string:string:string
                             file.inputformat org.apache.hadoop.mapred.TextInputFormat
                             file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                            location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+                            location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
                             name bucketmapjoin_tmp_result
                             serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                             serialization.format 1
                             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                            transient_lastDdlTime 1266347913
+                            transient_lastDdlTime 1266449192
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: bucketmapjoin_tmp_result
+                      TotalFiles: 1
+                      MultiFileSpray: false
       Local Work:
         Map Reduce Local Work
           Alias -> Map Local Tables:
@@ -200,7 +203,8 @@
                           File Output Operator
                             compressed: false
                             GlobalTableId: 1
-                            directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-33_238_8713741927786752274/10002
+                            directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-32_403_4982018782937064172/10002
+                            NumFilesPerFileSink: 1
                             table:
                                 input format: org.apache.hadoop.mapred.TextInputFormat
                                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -210,22 +214,24 @@
                                   columns.types string:string:string
                                   file.inputformat org.apache.hadoop.mapred.TextInputFormat
                                   file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                                  location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+                                  location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
                                   name bucketmapjoin_tmp_result
                                   serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                                   serialization.format 1
                                   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                                  transient_lastDdlTime 1266347913
+                                  transient_lastDdlTime 1266449192
                                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                                 name: bucketmapjoin_tmp_result
+                            TotalFiles: 1
+                            MultiFileSpray: false
           Bucket Mapjoin Context:
               Alias Bucket File Name Mapping:
-                b {file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt, file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt], file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt, file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt]}
+                b {file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket20.txt=[file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt, file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt], file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket_mapjoin/srcbucket21.txt=[file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt, file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt]}
       Needs Tagging: false
       Path -> Alias:
-        file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket_mapjoin [a]
+        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket_mapjoin [a]
       Path -> Partition:
-        file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket_mapjoin 
+        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket_mapjoin 
           Partition
             base file name: srcbucket_mapjoin
             input format: org.apache.hadoop.mapred.TextInputFormat
@@ -237,12 +243,12 @@
               columns.types int:string
               file.inputformat org.apache.hadoop.mapred.TextInputFormat
               file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket_mapjoin
+              location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket_mapjoin
               name srcbucket_mapjoin
               serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              transient_lastDdlTime 1266347911
+              transient_lastDdlTime 1266449190
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -254,12 +260,12 @@
                 columns.types int:string
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket_mapjoin
+                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket_mapjoin
                 name srcbucket_mapjoin
                 serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                transient_lastDdlTime 1266347911
+                transient_lastDdlTime 1266449190
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcbucket_mapjoin
             name: srcbucket_mapjoin
@@ -271,14 +277,14 @@
     Move Operator
       files:
           hdfs directory: true
-          source: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-33_238_8713741927786752274/10002
-          destination: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-33_238_8713741927786752274/10000
+          source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-32_403_4982018782937064172/10002
+          destination: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-32_403_4982018782937064172/10000
 
   Stage: Stage-0
     Move Operator
       tables:
           replace: true
-          source: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-33_238_8713741927786752274/10000
+          source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-32_403_4982018782937064172/10000
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -288,20 +294,20 @@
                 columns.types string:string:string
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
                 name bucketmapjoin_tmp_result
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                transient_lastDdlTime 1266347913
+                transient_lastDdlTime 1266449192
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: bucketmapjoin_tmp_result
-          tmp directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-33_238_8713741927786752274/10001
+          tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-32_403_4982018782937064172/10001
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-33_238_8713741927786752274/10002 
+        file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-32_403_4982018782937064172/10002 
             Reduce Output Operator
               sort order: 
               Map-reduce partition columns:
@@ -317,9 +323,9 @@
                     type: string
       Needs Tagging: false
       Path -> Alias:
-        file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-33_238_8713741927786752274/10002 [file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-33_238_8713741927786752274/10002]
+        file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-32_403_4982018782937064172/10002 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-32_403_4982018782937064172/10002]
       Path -> Partition:
-        file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-33_238_8713741927786752274/10002 
+        file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-32_403_4982018782937064172/10002 
           Partition
             base file name: 10002
             input format: org.apache.hadoop.mapred.TextInputFormat
@@ -330,12 +336,12 @@
               columns.types string:string:string
               file.inputformat org.apache.hadoop.mapred.TextInputFormat
               file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+              location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
               name bucketmapjoin_tmp_result
               serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              transient_lastDdlTime 1266347913
+              transient_lastDdlTime 1266449192
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -346,12 +352,12 @@
                 columns.types string:string:string
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
                 name bucketmapjoin_tmp_result
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                transient_lastDdlTime 1266347913
+                transient_lastDdlTime 1266449192
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: bucketmapjoin_tmp_result
             name: bucketmapjoin_tmp_result
@@ -360,7 +366,8 @@
           File Output Operator
             compressed: false
             GlobalTableId: 0
-            directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-33_238_8713741927786752274/10000
+            directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-32_403_4982018782937064172/10000
+            NumFilesPerFileSink: 1
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -370,14 +377,16 @@
                   columns.types string:string:string
                   file.inputformat org.apache.hadoop.mapred.TextInputFormat
                   file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+                  location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
                   name bucketmapjoin_tmp_result
                   serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                   serialization.format 1
                   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  transient_lastDdlTime 1266347913
+                  transient_lastDdlTime 1266449192
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: bucketmapjoin_tmp_result
+            TotalFiles: 1
+            MultiFileSpray: false
 
 
 PREHOOK: query: insert overwrite table bucketmapjoin_tmp_result 
@@ -399,11 +408,11 @@
 PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
 PREHOOK: type: QUERY
 PREHOOK: Input: default@bucketmapjoin_tmp_result
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-53_519_5956091880420865175/10000
+PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-40_409_8227758911905677185/10000
 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@bucketmapjoin_tmp_result
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-53_519_5956091880420865175/10000
+POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-40_409_8227758911905677185/10000
 464
 PREHOOK: query: explain extended
 insert overwrite table bucketmapjoin_tmp_result 
@@ -479,7 +488,8 @@
                       File Output Operator
                         compressed: false
                         GlobalTableId: 1
-                        directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-59_541_5540791583271613396/10002
+                        directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-44_802_8678513451756118730/10002
+                        NumFilesPerFileSink: 1
                         table:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -489,14 +499,16 @@
                               columns.types string:string:string
                               file.inputformat org.apache.hadoop.mapred.TextInputFormat
                               file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                              location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+                              location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
                               name bucketmapjoin_tmp_result
                               serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                               serialization.format 1
                               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                              transient_lastDdlTime 1266347913
+                              transient_lastDdlTime 1266449192
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             name: bucketmapjoin_tmp_result
+                        TotalFiles: 1
+                        MultiFileSpray: false
       Local Work:
         Map Reduce Local Work
           Alias -> Map Local Tables:
@@ -547,7 +559,8 @@
                         File Output Operator
                           compressed: false
                           GlobalTableId: 1
-                          directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-59_541_5540791583271613396/10002
+                          directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-44_802_8678513451756118730/10002
+                          NumFilesPerFileSink: 1
                           table:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -557,19 +570,21 @@
                                 columns.types string:string:string
                                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                                location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+                                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
                                 name bucketmapjoin_tmp_result
                                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                                 serialization.format 1
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                                transient_lastDdlTime 1266347913
+                                transient_lastDdlTime 1266449192
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: bucketmapjoin_tmp_result
+                          TotalFiles: 1
+                          MultiFileSpray: false
       Needs Tagging: false
       Path -> Alias:
-        file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b]
+        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b]
       Path -> Partition:
-        file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 
+        file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 
           Partition
             base file name: ds=2008-04-08
             input format: org.apache.hadoop.mapred.TextInputFormat
@@ -583,13 +598,13 @@
               columns.types int:string
               file.inputformat org.apache.hadoop.mapred.TextInputFormat
               file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket_mapjoin_part
+              location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket_mapjoin_part
               name srcbucket_mapjoin_part
               partition_columns ds
               serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              transient_lastDdlTime 1266347911
+              transient_lastDdlTime 1266449191
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -601,13 +616,13 @@
                 columns.types int:string
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket_mapjoin_part
+                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/srcbucket_mapjoin_part
                 name srcbucket_mapjoin_part
                 partition_columns ds
                 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                transient_lastDdlTime 1266347911
+                transient_lastDdlTime 1266449191
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcbucket_mapjoin_part
             name: srcbucket_mapjoin_part
@@ -619,14 +634,14 @@
     Move Operator
       files:
           hdfs directory: true
-          source: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-59_541_5540791583271613396/10002
-          destination: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-59_541_5540791583271613396/10000
+          source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-44_802_8678513451756118730/10002
+          destination: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-44_802_8678513451756118730/10000
 
   Stage: Stage-0
     Move Operator
       tables:
           replace: true
-          source: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-59_541_5540791583271613396/10000
+          source: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-44_802_8678513451756118730/10000
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -636,20 +651,20 @@
                 columns.types string:string:string
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
                 name bucketmapjoin_tmp_result
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                transient_lastDdlTime 1266347913
+                transient_lastDdlTime 1266449192
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: bucketmapjoin_tmp_result
-          tmp directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-59_541_5540791583271613396/10001
+          tmp directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-44_802_8678513451756118730/10001
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-59_541_5540791583271613396/10002 
+        file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-44_802_8678513451756118730/10002 
             Reduce Output Operator
               sort order: 
               Map-reduce partition columns:
@@ -665,9 +680,9 @@
                     type: string
       Needs Tagging: false
       Path -> Alias:
-        file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-59_541_5540791583271613396/10002 [file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-59_541_5540791583271613396/10002]
+        file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-44_802_8678513451756118730/10002 [file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-44_802_8678513451756118730/10002]
       Path -> Partition:
-        file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-59_541_5540791583271613396/10002 
+        file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-44_802_8678513451756118730/10002 
           Partition
             base file name: 10002
             input format: org.apache.hadoop.mapred.TextInputFormat
@@ -678,12 +693,12 @@
               columns.types string:string:string
               file.inputformat org.apache.hadoop.mapred.TextInputFormat
               file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+              location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
               name bucketmapjoin_tmp_result
               serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
               serialization.format 1
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              transient_lastDdlTime 1266347913
+              transient_lastDdlTime 1266449192
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -694,12 +709,12 @@
                 columns.types string:string:string
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+                location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
                 name bucketmapjoin_tmp_result
                 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                transient_lastDdlTime 1266347913
+                transient_lastDdlTime 1266449192
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: bucketmapjoin_tmp_result
             name: bucketmapjoin_tmp_result
@@ -708,7 +723,8 @@
           File Output Operator
             compressed: false
             GlobalTableId: 0
-            directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-18-59_541_5540791583271613396/10000
+            directory: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-44_802_8678513451756118730/10000
+            NumFilesPerFileSink: 1
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -718,14 +734,16 @@
                   columns.types string:string:string
                   file.inputformat org.apache.hadoop.mapred.TextInputFormat
                   file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
+                  location file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result
                   name bucketmapjoin_tmp_result
                   serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
                   serialization.format 1
                   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  transient_lastDdlTime 1266347913
+                  transient_lastDdlTime 1266449192
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: bucketmapjoin_tmp_result
+            TotalFiles: 1
+            MultiFileSpray: false
 
 
 PREHOOK: query: insert overwrite table bucketmapjoin_tmp_result 
@@ -747,11 +765,11 @@
 PREHOOK: query: select count(1) from bucketmapjoin_tmp_result
 PREHOOK: type: QUERY
 PREHOOK: Input: default@bucketmapjoin_tmp_result
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-19-11_856_1704347966505635966/10000
+PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-53_938_452605270621304413/10000
 POSTHOOK: query: select count(1) from bucketmapjoin_tmp_result
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@bucketmapjoin_tmp_result
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/scratchdir/hive_2010-02-16_11-19-11_856_1704347966505635966/10000
+POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_15-26-53_938_452605270621304413/10000
 464
 PREHOOK: query: drop table bucketmapjoin_tmp_result
 PREHOOK: type: DROPTABLE
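A note on the "Alias Bucket File Name Mapping" in the bucketmapjoin1 plan above: each of the two bucket files of srcbucket_mapjoin is paired with the two bucket files of the four-bucket srcbucket_mapjoin_part partition whose bucket index agrees modulo 2 (srcbucket20.txt with srcbucket20/22.txt, srcbucket21.txt with srcbucket21/23.txt). The sketch below is only a hedged illustration of that apparent rule, not the planner's code.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class BucketMapjoinMappingSketch {
    public static void main(String[] args) {
        int smallBuckets = 2;  // srcbucket_mapjoin: srcbucket20.txt, srcbucket21.txt
        int bigBuckets = 4;    // srcbucket_mapjoin_part/ds=2008-04-08: srcbucket20..23.txt
        Map<Integer, List<Integer>> mapping = new TreeMap<>();
        for (int small = 0; small < smallBuckets; small++) {
            mapping.put(small, new ArrayList<>());
        }
        for (int big = 0; big < bigBuckets; big++) {
            // Big-table bucket i is matched with small-table bucket i % smallBuckets.
            mapping.get(big % smallBuckets).add(big);
        }
        System.out.println(mapping);  // {0=[0, 2], 1=[1, 3]}
    }
}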