You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by na...@apache.org on 2009/12/20 01:04:30 UTC
svn commit: r892539 [1/2] - in /hadoop/hive/trunk: ./ data/files/
ql/src/java/org/apache/hadoop/hive/ql/parse/
ql/src/test/org/apache/hadoop/hive/ql/ ql/src/test/queries/clientnegative/
ql/src/test/queries/clientpositive/ ql/src/test/results/clientnegative/ ql/src/test/results/clientpositive/ ql/src/test/results/compiler/plan/
Author: namit
Date: Sun Dec 20 00:04:29 2009
New Revision: 892539
URL: http://svn.apache.org/viewvc?rev=892539&view=rev
Log:
HIVE-999 bug in sampling if denominator != numBuckets
(He Yongqiang via namit)
Added:
hadoop/hive/trunk/data/files/srcbucket20.txt
hadoop/hive/trunk/data/files/srcbucket21.txt
hadoop/hive/trunk/data/files/srcbucket22.txt
hadoop/hive/trunk/data/files/srcbucket23.txt
hadoop/hive/trunk/ql/src/test/queries/clientnegative/sample.q
hadoop/hive/trunk/ql/src/test/results/clientnegative/sample.q.out
Modified:
hadoop/hive/trunk/CHANGES.txt
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
hadoop/hive/trunk/ql/src/test/queries/clientpositive/sample6.q
hadoop/hive/trunk/ql/src/test/results/clientpositive/input2.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input3.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/sample6.q.out
hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample6.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample7.q.xml
Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=892539&r1=892538&r2=892539&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Sun Dec 20 00:04:29 2009
@@ -327,12 +327,15 @@
HIVE-993 Remove duplicate conversion for MapJoin
(He Yongqiang via namit)
- HIVE-993 commons-dhcp not working
+ HIVE-992 commons-dhcp not working
(Bennie Schut via namit)
HIVE-595 throw error if rename fails
(Zheng Shao via namit)
+ HIVE-999 bug in sampling if denominator != numBuckets
+ (He Yongqiang via namit)
+
Release 0.4.0 - Unreleased
INCOMPATIBLE CHANGES
Added: hadoop/hive/trunk/data/files/srcbucket20.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/data/files/srcbucket20.txt?rev=892539&view=auto
==============================================================================
--- hadoop/hive/trunk/data/files/srcbucket20.txt (added)
+++ hadoop/hive/trunk/data/files/srcbucket20.txt Sun Dec 20 00:04:29 2009
@@ -0,0 +1,118 @@
+165val_165
+484val_484
+150val_150
+224val_224
+66val_66
+213val_213
+374val_374
+495val_495
+37val_37
+327val_327
+15val_15
+338val_338
+459val_459
+466val_466
+396val_396
+309val_309
+367val_367
+0val_0
+455val_455
+316val_316
+345val_345
+129val_129
+378val_378
+4val_4
+356val_356
+169val_169
+125val_125
+437val_437
+286val_286
+187val_187
+176val_176
+459val_459
+51val_51
+103val_103
+239val_239
+213val_213
+176val_176
+275val_275
+260val_260
+404val_404
+217val_217
+84val_84
+466val_466
+8val_8
+411val_411
+172val_172
+129val_129
+158val_158
+0val_0
+26val_26
+165val_165
+327val_327
+51val_51
+404val_404
+95val_95
+282val_282
+187val_187
+316val_316
+169val_169
+77val_77
+0val_0
+118val_118
+282val_282
+419val_419
+15val_15
+118val_118
+19val_19
+224val_224
+309val_309
+389val_389
+327val_327
+242val_242
+392val_392
+242val_242
+396val_396
+95val_95
+11val_11
+143val_143
+228val_228
+33val_33
+103val_103
+367val_367
+239val_239
+480val_480
+202val_202
+316val_316
+235val_235
+80val_80
+44val_44
+466val_466
+257val_257
+190val_190
+114val_114
+396val_396
+217val_217
+125val_125
+187val_187
+480val_480
+491val_491
+305val_305
+444val_444
+169val_169
+323val_323
+480val_480
+136val_136
+172val_172
+462val_462
+26val_26
+462val_462
+341val_341
+183val_183
+84val_84
+37val_37
+448val_448
+194val_194
+477val_477
+169val_169
+400val_400
Added: hadoop/hive/trunk/data/files/srcbucket21.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/data/files/srcbucket21.txt?rev=892539&view=auto
==============================================================================
--- hadoop/hive/trunk/data/files/srcbucket21.txt (added)
+++ hadoop/hive/trunk/data/files/srcbucket21.txt Sun Dec 20 00:04:29 2009
@@ -0,0 +1,120 @@
+27val_27
+409val_409
+265val_265
+401val_401
+166val_166
+430val_430
+287val_287
+207val_207
+199val_199
+247val_247
+489val_489
+162val_162
+397val_397
+342val_342
+195val_195
+155val_155
+203val_203
+339val_339
+302val_302
+438val_438
+489val_489
+221val_221
+92val_92
+111val_111
+427val_427
+382val_382
+386val_386
+430val_430
+221val_221
+137val_137
+180val_180
+12val_12
+67val_67
+489val_489
+353val_353
+272val_272
+463val_463
+119val_119
+496val_496
+393val_393
+100val_100
+298val_298
+199val_199
+191val_191
+96val_96
+481val_481
+409val_409
+470val_470
+137val_137
+85val_85
+364val_364
+306val_306
+272val_272
+331val_331
+401val_401
+452val_452
+177val_177
+5val_5
+317val_317
+34val_34
+229val_229
+195val_195
+430val_430
+119val_119
+489val_489
+78val_78
+41val_41
+492val_492
+449val_449
+218val_218
+30val_30
+74val_74
+342val_342
+368val_368
+485val_485
+70val_70
+401val_401
+191val_191
+5val_5
+438val_438
+467val_467
+229val_229
+463val_463
+283val_283
+331val_331
+335val_335
+104val_104
+409val_409
+401val_401
+258val_258
+203val_203
+12val_12
+478val_478
+298val_298
+382val_382
+5val_5
+70val_70
+397val_397
+104val_104
+70val_70
+438val_438
+119val_119
+360val_360
+199val_199
+478val_478
+317val_317
+207val_207
+265val_265
+353val_353
+214val_214
+133val_133
+375val_375
+401val_401
+67val_67
+379val_379
+492val_492
+100val_100
+298val_298
+9val_9
+126val_126
Added: hadoop/hive/trunk/data/files/srcbucket22.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/data/files/srcbucket22.txt?rev=892539&view=auto
==============================================================================
--- hadoop/hive/trunk/data/files/srcbucket22.txt (added)
+++ hadoop/hive/trunk/data/files/srcbucket22.txt Sun Dec 20 00:04:29 2009
@@ -0,0 +1,124 @@
+86val_86
+255val_255
+273val_273
+369val_369
+406val_406
+152val_152
+145val_145
+277val_277
+82val_82
+417val_417
+219val_219
+446val_446
+394val_394
+237val_237
+482val_482
+174val_174
+413val_413
+208val_208
+174val_174
+417val_417
+365val_365
+266val_266
+439val_439
+325val_325
+167val_167
+475val_475
+17val_17
+57val_57
+149val_149
+170val_170
+20val_20
+280val_280
+35val_35
+277val_277
+208val_208
+192val_192
+138val_138
+318val_318
+332val_332
+284val_284
+181val_181
+138val_138
+208val_208
+24val_24
+431val_431
+42val_42
+468val_468
+156val_156
+468val_468
+196val_196
+288val_288
+457val_457
+318val_318
+318val_318
+369val_369
+413val_413
+134val_134
+138val_138
+307val_307
+435val_435
+277val_277
+273val_273
+369val_369
+226val_226
+497val_497
+402val_402
+35val_35
+336val_336
+233val_233
+42val_42
+321val_321
+149val_149
+138val_138
+453val_453
+64val_64
+468val_468
+167val_167
+219val_219
+116val_116
+288val_288
+244val_244
+280val_280
+2val_2
+35val_35
+321val_321
+53val_53
+105val_105
+406val_406
+406val_406
+262val_262
+424val_424
+431val_431
+431val_431
+424val_424
+291val_291
+24val_24
+255val_255
+163val_163
+200val_200
+237val_237
+439val_439
+248val_248
+479val_479
+417val_417
+325val_325
+277val_277
+178val_178
+468val_468
+310val_310
+493val_493
+460val_460
+233val_233
+406val_406
+189val_189
+134val_134
+167val_167
+273val_273
+97val_97
+28val_28
+152val_152
+307val_307
+222val_222
+200val_200
+97val_97
Added: hadoop/hive/trunk/data/files/srcbucket23.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/data/files/srcbucket23.txt?rev=892539&view=auto
==============================================================================
--- hadoop/hive/trunk/data/files/srcbucket23.txt (added)
+++ hadoop/hive/trunk/data/files/srcbucket23.txt Sun Dec 20 00:04:29 2009
@@ -0,0 +1,138 @@
+238val_238
+311val_311
+278val_278
+98val_98
+193val_193
+128val_128
+146val_146
+429val_429
+469val_469
+281val_281
+209val_209
+403val_403
+252val_252
+292val_292
+153val_153
+193val_193
+494val_494
+399val_399
+377val_377
+113val_113
+128val_128
+311val_311
+205val_205
+157val_157
+47val_47
+72val_72
+399val_399
+498val_498
+469val_469
+54val_54
+216val_216
+278val_278
+289val_289
+65val_65
+311val_311
+241val_241
+83val_83
+333val_333
+230val_230
+384val_384
+373val_373
+348val_348
+58val_58
+230val_230
+348val_348
+179val_179
+322val_322
+197val_197
+454val_454
+418val_418
+230val_230
+205val_205
+120val_120
+131val_131
+43val_43
+436val_436
+469val_469
+308val_308
+98val_98
+197val_197
+490val_490
+87val_87
+179val_179
+395val_395
+238val_238
+72val_72
+90val_90
+10val_10
+395val_395
+58val_58
+168val_168
+472val_472
+322val_322
+498val_498
+160val_160
+458val_458
+76val_76
+223val_223
+209val_209
+76val_76
+69val_69
+230val_230
+296val_296
+113val_113
+216val_216
+344val_344
+274val_274
+223val_223
+256val_256
+263val_263
+487val_487
+128val_128
+432val_432
+469val_469
+193val_193
+366val_366
+175val_175
+403val_403
+483val_483
+90val_90
+348val_348
+201val_201
+164val_164
+454val_454
+164val_164
+351val_351
+414val_414
+120val_120
+429val_429
+443val_443
+230val_230
+333val_333
+249val_249
+83val_83
+175val_175
+454val_454
+421val_421
+407val_407
+384val_384
+256val_256
+384val_384
+18val_18
+498val_498
+146val_146
+458val_458
+362val_362
+186val_186
+285val_285
+348val_348
+18val_18
+281val_281
+344val_344
+469val_469
+315val_315
+348val_348
+414val_414
+90val_90
+403val_403
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java?rev=892539&r1=892538&r2=892539&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java Sun Dec 20 00:04:29 2009
@@ -97,6 +97,7 @@
INVALID_MAPJOIN_HINT("neither table specified as map-table"),
INVALID_MAPJOIN_TABLE("result of a union cannot be a map table"),
NON_BUCKETED_TABLE("Sampling Expression Needed for Non-Bucketed Table"),
+ BUCKETED_NUMBERATOR_BIGGER_DENOMINATOR("Numberator should not be bigger than denaminator in sample clause for Table"),
NEED_PARTITION_ERROR("need to specify partition columns because the destination table is partitioned."),
CTAS_CTLT_COEXISTENCE("Create table command does not allow LIKE and AS-SELECT in the same command"),
CTAS_COLLST_COEXISTENCE("Create table as select command cannot specify the list of columns for the target table."),
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=892539&r1=892538&r2=892539&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Sun Dec 20 00:04:29 2009
@@ -4523,6 +4523,10 @@
if (tabBucketCols.size() == 0 && sampleExprs.size() == 0) {
throw new SemanticException(ErrorMsg.NON_BUCKETED_TABLE.getMsg() + " " + tab.getName());
}
+
+ if (num > den) {
+ throw new SemanticException(ErrorMsg.BUCKETED_NUMBERATOR_BIGGER_DENOMINATOR.getMsg() + " " + tab.getName());
+ }
// check if a predicate is needed
// predicate is needed if either input pruning is not enough
@@ -4553,7 +4557,7 @@
// check if input pruning is enough
if ((sampleExprs == null || sampleExprs.size() == 0 || colsEqual)
- && (num == den || den <= numBuckets && numBuckets % den == 0)) {
+ && (num == den || (den % numBuckets == 0 || numBuckets % den == 0))) {
// input pruning is enough; add the filter for the optimizer to use it later
LOG.info("No need for sample filter");
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=892539&r1=892538&r2=892539&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Sun Dec 20 00:04:29 2009
@@ -238,7 +238,7 @@
String warehousePath = ((new URI(testWarehouse)).getPath());
// Drop any tables that remain due to unsuccessful runs
for(String s: new String [] {"src", "src1", "src_json", "src_thrift", "src_sequencefile",
- "srcpart", "srcbucket", "dest1", "dest2",
+ "srcpart", "srcbucket","srcbucket2", "dest1", "dest2",
"dest3", "dest4", "dest4_sequencefile",
"dest_j1", "dest_j2", "dest_g1", "dest_g2",
"fetchtask_ioexception"}) {
@@ -313,6 +313,16 @@
fs.copyFromLocalFile(false, true, fpath, newfpath);
runLoadCmd("LOAD DATA INPATH '" + newfpath.toString() + "' INTO TABLE srcbucket");
}
+
+ runCreateTableCmd("CREATE TABLE srcbucket2(key int, value string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE");
+ //db.createTable("srcbucket", cols, null, TextInputFormat.class, IgnoreKeyTextOutputFormat.class, 2, bucketCols);
+ srcTables.add("srcbucket2");
+ for (String fname: new String [] {"srcbucket20.txt", "srcbucket21.txt", "srcbucket22.txt", "srcbucket23.txt"}) {
+ fpath = new Path(testFiles, fname);
+ newfpath = new Path(tmppath, fname);
+ fs.copyFromLocalFile(false, true, fpath, newfpath);
+ runLoadCmd("LOAD DATA INPATH '" + newfpath.toString() + "' INTO TABLE srcbucket2");
+ }
for (String tname: new String [] {"src", "src1"}) {
db.createTable(tname, cols, null, TextInputFormat.class, IgnoreKeyTextOutputFormat.class);
Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/sample.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/sample.q?rev=892539&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/sample.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/sample.q Sun Dec 20 00:04:29 2009
@@ -0,0 +1 @@
+explain extended SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 5 OUT OF 4 on key) s
\ No newline at end of file
Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/sample6.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/sample6.q?rev=892539&r1=892538&r2=892539&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/sample6.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/sample6.q Sun Dec 20 00:04:29 2009
@@ -9,3 +9,23 @@
FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 4 on key) s;
SELECT dest1.* FROM dest1;
+
+EXPLAIN EXTENDED SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 4 OUT OF 4 on key) s;
+SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 4 OUT OF 4 on key) s;
+
+EXPLAIN EXTENDED SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s;
+SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s;
+
+EXPLAIN EXTENDED SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 3 on key) s;
+SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 3 on key) s;
+
+EXPLAIN EXTENDED SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 2 OUT OF 3 on key) s;
+SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 2 OUT OF 3 on key) s;
+
+EXPLAIN EXTENDED SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s;
+SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s;
+
+EXPLAIN EXTENDED SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 2 OUT OF 4 on key) s;
+SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 2 OUT OF 4 on key) s;
+
+drop table dest1;
\ No newline at end of file
Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/sample.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/sample.q.out?rev=892539&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/sample.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/sample.q.out Sun Dec 20 00:04:29 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: Numberator should not be bigger than denaminator in sample clause for Table srcbucket
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input2.q.out?rev=892539&r1=892538&r2=892539&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input2.q.out Sun Dec 20 00:04:29 2009
@@ -39,6 +39,7 @@
src_sequencefile
src_thrift
srcbucket
+srcbucket2
srcpart
test2a
test2b
@@ -57,6 +58,7 @@
src_sequencefile
src_thrift
srcbucket
+srcbucket2
srcpart
test2b
PREHOOK: query: DROP TABLE TEST2b
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input3.q.out?rev=892539&r1=892538&r2=892539&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input3.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input3.q.out Sun Dec 20 00:04:29 2009
@@ -43,6 +43,7 @@
src_sequencefile
src_thrift
srcbucket
+srcbucket2
srcpart
test3a
test3b
@@ -126,6 +127,7 @@
src_sequencefile
src_thrift
srcbucket
+srcbucket2
srcpart
test3a
test3c
@@ -163,7 +165,7 @@
r1 int
r2 double
-Detailed Table Information Table(tableName:test3c, dbName:default, owner:njain, createTime:1253780443, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:r1, type:int, comment:null), FieldSchema(name:r2, type:double, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/test3c, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{last_modified_by=njain,last_modified_time=1253780444})
+Detailed Table Information Table(tableName:test3c, dbName:default, owner:njain, createTime:1261267074, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:r1, type:int, comment:null), FieldSchema(name:r2, type:double, comment:null)], location:file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/test3c, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{last_modified_by=njain,last_modified_time=1261267075,transient_lastDdlTime=1261267075})
PREHOOK: query: DROP TABLE TEST3a
PREHOOK: type: DROPTABLE
POSTHOOK: query: DROP TABLE TEST3a
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/sample6.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/sample6.q.out?rev=892539&r1=892538&r2=892539&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/sample6.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/sample6.q.out Sun Dec 20 00:04:29 2009
@@ -48,7 +48,7 @@
File Output Operator
compressed: false
GlobalTableId: 1
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/2091516733/10002
+ directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1206064756/10002
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -63,7 +63,7 @@
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/dest1
- transient_lastDdlTime 1258006761
+ transient_lastDdlTime 1261259092
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
Needs Tagging: false
@@ -86,6 +86,7 @@
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket
+ transient_lastDdlTime 1261259090
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -102,7 +103,7 @@
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket
- transient_lastDdlTime 1258006760
+ transient_lastDdlTime 1261259090
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: srcbucket
name: srcbucket
@@ -113,11 +114,11 @@
Move Operator
files:
hdfs directory: true
- source: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/2091516733/10002
- destination: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/23996439/10000
+ source: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1206064756/10002
+ destination: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/919194157/10000
Map Reduce
Alias -> Map Operator Tree:
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/2091516733/10002
+ file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1206064756/10002
Reduce Output Operator
sort order:
Map-reduce partition columns:
@@ -131,9 +132,9 @@
type: string
Needs Tagging: false
Path -> Alias:
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/2091516733/10002 [file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/2091516733/10002]
+ file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1206064756/10002 [file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1206064756/10002]
Path -> Partition:
- file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/2091516733/10002
+ file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1206064756/10002
Partition
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -148,7 +149,7 @@
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/dest1
- transient_lastDdlTime 1258006761
+ transient_lastDdlTime 1261259092
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -164,7 +165,7 @@
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/dest1
- transient_lastDdlTime 1258006761
+ transient_lastDdlTime 1261259092
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
name: dest1
@@ -173,7 +174,7 @@
File Output Operator
compressed: false
GlobalTableId: 0
- directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/23996439/10000
+ directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/919194157/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -188,7 +189,7 @@
file.inputformat org.apache.hadoop.mapred.TextInputFormat
location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/dest1
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- transient_lastDdlTime 1258006761
+ transient_lastDdlTime 1261259092
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
@@ -196,7 +197,7 @@
Move Operator
tables:
replace: true
- source: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/23996439/10000
+ source: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/919194157/10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -211,10 +212,10 @@
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/dest1
- transient_lastDdlTime 1258006761
+ transient_lastDdlTime 1261259092
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: dest1
- tmp directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/23996439/10001
+ tmp directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/919194157/10001
PREHOOK: query: INSERT OVERWRITE TABLE dest1 SELECT s.*
@@ -230,11 +231,11 @@
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1335018402/10000
+PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1816827007/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1335018402/10000
+POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1816827007/10000
468 val_469
272 val_273
448 val_449
@@ -484,3 +485,2202 @@
348 val_348
400 val_400
200 val_200
+PREHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 4 OUT OF 4 on key) s
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 4 OUT OF 4 on key) s
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 4 4 (TOK_TABLE_OR_COL key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s)))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ s
+ TableScan
+ alias: s
+ Filter Operator
+ isSamplingPred: false
+ predicate:
+ expr: (((hash(key) & 2147483647) % 4) = 3)
+ type: boolean
+ Filter Operator
+ isSamplingPred: true
+ predicate:
+ expr: (((hash(key) & 2147483647) % 4) = 3)
+ type: boolean
+ Select Operator
+ expressions:
+ expr: key
+ type: int
+ expr: value
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/2061669317/10001
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0,_col1
+ serialization.format 1
+ columns.types int:string
+ Needs Tagging: false
+ Path -> Alias:
+ file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket/srcbucket1.txt [s]
+ Path -> Partition:
+ file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket/srcbucket1.txt
+ Partition
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ name srcbucket
+ columns.types int:string
+ bucket_field_name key
+ serialization.ddl struct srcbucket { i32 key, string value}
+ columns key,value
+ serialization.format 1
+ bucket_count 2
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket
+ transient_lastDdlTime 1261259090
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ name srcbucket
+ columns.types int:string
+ bucket_field_name key
+ serialization.ddl struct srcbucket { i32 key, string value}
+ columns key,value
+ serialization.format 1
+ bucket_count 2
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket
+ transient_lastDdlTime 1261259090
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: srcbucket
+ name: srcbucket
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+
+PREHOOK: query: SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 4 OUT OF 4 on key) s
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcbucket
+PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1816410424/10000
+POSTHOOK: query: SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 4 OUT OF 4 on key) s
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcbucket
+POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1816410424/10000
+179 val_180
+291 val_292
+271 val_272
+135 val_136
+167 val_168
+423 val_424
+455 val_456
+231 val_232
+287 val_288
+31 val_32
+447 val_448
+443 val_444
+175 val_176
+147 val_148
+371 val_372
+35 val_36
+243 val_244
+15 val_16
+391 val_392
+343 val_344
+275 val_276
+455 val_456
+99 val_100
+335 val_336
+367 val_368
+59 val_60
+11 val_12
+123 val_124
+63 val_64
+135 val_136
+3 val_4
+331 val_332
+239 val_240
+235 val_236
+11 val_12
+71 val_72
+323 val_324
+467 val_468
+411 val_412
+175 val_176
+147 val_148
+119 val_120
+347 val_348
+427 val_428
+291 val_292
+375 val_376
+191 val_192
+491 val_492
+351 val_352
+23 val_24
+487 val_488
+331 val_332
+475 val_476
+463 val_464
+183 val_184
+399 val_400
+15 val_16
+275 val_276
+87 val_88
+75 val_76
+443 val_444
+407 val_408
+367 val_368
+51 val_52
+355 val_356
+363 val_364
+19 val_20
+47 val_48
+239 val_240
+119 val_120
+11 val_12
+143 val_144
+199 val_200
+375 val_376
+363 val_364
+395 val_396
+347 val_348
+259 val_260
+303 val_304
+303 val_304
+451 val_452
+351 val_352
+443 val_444
+399 val_400
+267 val_268
+375 val_376
+295 val_296
+439 val_440
+119 val_120
+375 val_376
+391 val_392
+287 val_288
+375 val_376
+35 val_36
+435 val_436
+151 val_152
+491 val_492
+295 val_296
+407 val_408
+439 val_440
+335 val_336
+371 val_372
+123 val_124
+355 val_356
+87 val_88
+427 val_428
+371 val_372
+495 val_496
+259 val_260
+243 val_244
+151 val_152
+415 val_416
+363 val_364
+459 val_460
+379 val_380
+215 val_216
+431 val_432
+407 val_408
+371 val_372
+135 val_136
+91 val_92
+35 val_36
+227 val_228
+395 val_396
+311 val_311
+27 val_27
+255 val_255
+495 val_495
+327 val_327
+15 val_15
+403 val_403
+219 val_219
+287 val_287
+459 val_459
+207 val_207
+199 val_199
+399 val_399
+247 val_247
+439 val_439
+367 val_367
+167 val_167
+195 val_195
+475 val_475
+155 val_155
+203 val_203
+339 val_339
+455 val_455
+311 val_311
+111 val_111
+47 val_47
+35 val_35
+427 val_427
+399 val_399
+187 val_187
+459 val_459
+51 val_51
+103 val_103
+239 val_239
+311 val_311
+275 val_275
+83 val_83
+67 val_67
+411 val_411
+463 val_463
+431 val_431
+179 val_179
+119 val_119
+199 val_199
+191 val_191
+327 val_327
+131 val_131
+51 val_51
+43 val_43
+95 val_95
+187 val_187
+87 val_87
+179 val_179
+395 val_395
+419 val_419
+15 val_15
+307 val_307
+19 val_19
+435 val_435
+327 val_327
+331 val_331
+395 val_395
+35 val_35
+95 val_95
+11 val_11
+143 val_143
+195 val_195
+119 val_119
+223 val_223
+103 val_103
+367 val_367
+167 val_167
+219 val_219
+239 val_239
+223 val_223
+263 val_263
+487 val_487
+191 val_191
+467 val_467
+463 val_463
+35 val_35
+283 val_283
+331 val_331
+235 val_235
+335 val_335
+175 val_175
+403 val_403
+483 val_483
+203 val_203
+431 val_431
+431 val_431
+187 val_187
+291 val_291
+351 val_351
+255 val_255
+163 val_163
+119 val_119
+491 val_491
+439 val_439
+479 val_479
+199 val_199
+443 val_443
+323 val_323
+207 val_207
+83 val_83
+175 val_175
+375 val_375
+407 val_407
+67 val_67
+379 val_379
+167 val_167
+183 val_183
+315 val_315
+307 val_307
+403 val_403
+PREHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 2 (TOK_TABLE_OR_COL key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s)))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ s
+ TableScan
+ alias: s
+ Filter Operator
+ isSamplingPred: false
+ predicate:
+ expr: (((hash(key) & 2147483647) % 2) = 0)
+ type: boolean
+ Filter Operator
+ isSamplingPred: true
+ predicate:
+ expr: (((hash(key) & 2147483647) % 2) = 0)
+ type: boolean
+ Select Operator
+ expressions:
+ expr: key
+ type: int
+ expr: value
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/166063607/10001
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0,_col1
+ serialization.format 1
+ columns.types int:string
+ Needs Tagging: false
+ Path -> Alias:
+ file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s]
+ Path -> Partition:
+ file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt
+ Partition
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ name srcbucket
+ columns.types int:string
+ bucket_field_name key
+ serialization.ddl struct srcbucket { i32 key, string value}
+ columns key,value
+ serialization.format 1
+ bucket_count 2
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket
+ transient_lastDdlTime 1261259090
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ name srcbucket
+ columns.types int:string
+ bucket_field_name key
+ serialization.ddl struct srcbucket { i32 key, string value}
+ columns key,value
+ serialization.format 1
+ bucket_count 2
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket
+ transient_lastDdlTime 1261259090
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: srcbucket
+ name: srcbucket
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+
+PREHOOK: query: SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcbucket
+PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/694677631/10000
+POSTHOOK: query: SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcbucket
+POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/694677631/10000
+474 val_475
+62 val_63
+468 val_469
+272 val_273
+448 val_449
+246 val_247
+440 val_441
+278 val_279
+296 val_297
+428 val_429
+126 val_127
+106 val_107
+356 val_357
+490 val_491
+402 val_403
+128 val_129
+10 val_11
+226 val_227
+110 val_111
+0 val_1
+240 val_241
+286 val_287
+408 val_409
+476 val_477
+482 val_483
+48 val_49
+424 val_425
+226 val_227
+494 val_495
+488 val_489
+94 val_95
+50 val_51
+402 val_403
+128 val_129
+468 val_469
+314 val_315
+224 val_225
+344 val_345
+4 val_5
+206 val_207
+114 val_115
+56 val_57
+114 val_115
+254 val_255
+390 val_391
+304 val_305
+264 val_265
+196 val_197
+238 val_239
+20 val_21
+492 val_493
+82 val_83
+58 val_59
+86 val_87
+438 val_439
+360 val_361
+222 val_223
+42 val_43
+338 val_339
+68 val_69
+16 val_17
+492 val_493
+376 val_377
+120 val_121
+306 val_307
+426 val_427
+132 val_133
+446 val_447
+386 val_387
+388 val_389
+184 val_185
+284 val_285
+246 val_247
+262 val_263
+122 val_123
+438 val_439
+390 val_391
+352 val_353
+226 val_227
+328 val_329
+382 val_383
+342 val_343
+480 val_481
+102 val_103
+480 val_481
+318 val_319
+392 val_393
+476 val_477
+258 val_259
+174 val_175
+252 val_253
+114 val_115
+264 val_265
+48 val_49
+336 val_337
+340 val_341
+390 val_391
+484 val_485
+6 val_7
+260 val_261
+2 val_3
+170 val_171
+164 val_165
+118 val_119
+310 val_311
+104 val_105
+80 val_81
+326 val_327
+450 val_451
+140 val_141
+212 val_213
+308 val_309
+30 val_31
+358 val_359
+416 val_417
+42 val_43
+386 val_387
+454 val_455
+364 val_365
+20 val_21
+52 val_53
+40 val_41
+8 val_9
+168 val_169
+384 val_385
+324 val_325
+310 val_311
+206 val_207
+404 val_405
+206 val_207
+226 val_227
+262 val_263
+260 val_261
+328 val_329
+322 val_323
+122 val_123
+404 val_405
+384 val_385
+76 val_77
+116 val_117
+42 val_43
+104 val_105
+406 val_407
+32 val_33
+132 val_133
+192 val_193
+58 val_59
+70 val_71
+356 val_357
+352 val_353
+52 val_53
+330 val_331
+138 val_139
+160 val_161
+454 val_455
+76 val_77
+174 val_175
+412 val_413
+16 val_17
+204 val_205
+126 val_127
+274 val_275
+374 val_375
+494 val_495
+216 val_217
+470 val_471
+196 val_197
+302 val_303
+450 val_451
+12 val_13
+398 val_399
+334 val_335
+384 val_385
+60 val_61
+442 val_443
+52 val_53
+404 val_405
+446 val_447
+300 val_301
+0 val_1
+268 val_269
+392 val_393
+104 val_105
+436 val_437
+156 val_157
+118 val_119
+172 val_173
+244 val_245
+6 val_7
+284 val_285
+164 val_165
+136 val_137
+462 val_463
+432 val_433
+496 val_497
+144 val_145
+408 val_409
+152 val_153
+382 val_383
+348 val_349
+122 val_123
+292 val_293
+182 val_183
+474 val_475
+310 val_311
+52 val_53
+486 val_487
+152 val_153
+378 val_379
+414 val_415
+256 val_257
+292 val_293
+412 val_413
+40 val_41
+478 val_479
+178 val_179
+100 val_101
+156 val_157
+228 val_229
+22 val_23
+248 val_249
+402 val_403
+62 val_63
+162 val_163
+244 val_245
+276 val_277
+46 val_47
+78 val_79
+134 val_135
+196 val_197
+410 val_411
+82 val_83
+440 val_441
+100 val_101
+308 val_309
+430 val_431
+468 val_469
+152 val_153
+138 val_139
+76 val_77
+300 val_301
+478 val_479
+118 val_119
+178 val_179
+242 val_243
+244 val_245
+238 val_238
+86 val_86
+278 val_278
+98 val_98
+484 val_484
+150 val_150
+224 val_224
+66 val_66
+128 val_128
+146 val_146
+406 val_406
+374 val_374
+152 val_152
+82 val_82
+166 val_166
+430 val_430
+252 val_252
+292 val_292
+338 val_338
+446 val_446
+394 val_394
+482 val_482
+174 val_174
+494 val_494
+466 val_466
+208 val_208
+174 val_174
+396 val_396
+162 val_162
+266 val_266
+342 val_342
+0 val_0
+128 val_128
+316 val_316
+302 val_302
+438 val_438
+170 val_170
+20 val_20
+378 val_378
+92 val_92
+72 val_72
+4 val_4
+280 val_280
+208 val_208
+356 val_356
+382 val_382
+498 val_498
+386 val_386
+192 val_192
+286 val_286
+176 val_176
+54 val_54
+138 val_138
+216 val_216
+430 val_430
+278 val_278
+176 val_176
+318 val_318
+332 val_332
+180 val_180
+284 val_284
+12 val_12
+230 val_230
+260 val_260
+404 val_404
+384 val_384
+272 val_272
+138 val_138
+84 val_84
+348 val_348
+466 val_466
+58 val_58
+8 val_8
+230 val_230
+208 val_208
+348 val_348
+24 val_24
+172 val_172
+42 val_42
+158 val_158
+496 val_496
+0 val_0
+322 val_322
+468 val_468
+454 val_454
+100 val_100
+298 val_298
+418 val_418
+96 val_96
+26 val_26
+230 val_230
+120 val_120
+404 val_404
+436 val_436
+156 val_156
+468 val_468
+308 val_308
+196 val_196
+288 val_288
+98 val_98
+282 val_282
+318 val_318
+318 val_318
+470 val_470
+316 val_316
+0 val_0
+490 val_490
+364 val_364
+118 val_118
+134 val_134
+282 val_282
+138 val_138
+238 val_238
+118 val_118
+72 val_72
+90 val_90
+10 val_10
+306 val_306
+224 val_224
+242 val_242
+392 val_392
+272 val_272
+242 val_242
+452 val_452
+226 val_226
+402 val_402
+396 val_396
+58 val_58
+336 val_336
+168 val_168
+34 val_34
+472 val_472
+322 val_322
+498 val_498
+160 val_160
+42 val_42
+430 val_430
+458 val_458
+78 val_78
+76 val_76
+492 val_492
+218 val_218
+228 val_228
+138 val_138
+30 val_30
+64 val_64
+468 val_468
+76 val_76
+74 val_74
+342 val_342
+230 val_230
+368 val_368
+296 val_296
+216 val_216
+344 val_344
+274 val_274
+116 val_116
+256 val_256
+70 val_70
+480 val_480
+288 val_288
+244 val_244
+438 val_438
+128 val_128
+432 val_432
+202 val_202
+316 val_316
+280 val_280
+2 val_2
+80 val_80
+44 val_44
+104 val_104
+466 val_466
+366 val_366
+406 val_406
+190 val_190
+406 val_406
+114 val_114
+258 val_258
+90 val_90
+262 val_262
+348 val_348
+424 val_424
+12 val_12
+396 val_396
+164 val_164
+454 val_454
+478 val_478
+298 val_298
+164 val_164
+424 val_424
+382 val_382
+70 val_70
+480 val_480
+24 val_24
+104 val_104
+70 val_70
+438 val_438
+414 val_414
+200 val_200
+360 val_360
+248 val_248
+444 val_444
+120 val_120
+230 val_230
+478 val_478
+178 val_178
+468 val_468
+310 val_310
+460 val_460
+480 val_480
+136 val_136
+172 val_172
+214 val_214
+462 val_462
+406 val_406
+454 val_454
+384 val_384
+256 val_256
+26 val_26
+134 val_134
+384 val_384
+18 val_18
+462 val_462
+492 val_492
+100 val_100
+298 val_298
+498 val_498
+146 val_146
+458 val_458
+362 val_362
+186 val_186
+348 val_348
+18 val_18
+344 val_344
+84 val_84
+28 val_28
+448 val_448
+152 val_152
+348 val_348
+194 val_194
+414 val_414
+222 val_222
+126 val_126
+90 val_90
+400 val_400
+200 val_200
+PREHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 3 on key) s
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 3 on key) s
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 1 3 (TOK_TABLE_OR_COL key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s)))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ s
+ TableScan
+ alias: s
+ Filter Operator
+ isSamplingPred: false
+ predicate:
+ expr: (((hash(key) & 2147483647) % 3) = 0)
+ type: boolean
+ Filter Operator
+ isSamplingPred: true
+ predicate:
+ expr: (((hash(key) & 2147483647) % 3) = 0)
+ type: boolean
+ Select Operator
+ expressions:
+ expr: key
+ type: int
+ expr: value
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1351190369/10001
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0,_col1
+ serialization.format 1
+ columns.types int:string
+ Needs Tagging: false
+ Path -> Alias:
+ file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket [s]
+ Path -> Partition:
+ file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket
+ Partition
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ name srcbucket
+ columns.types int:string
+ bucket_field_name key
+ serialization.ddl struct srcbucket { i32 key, string value}
+ columns key,value
+ serialization.format 1
+ bucket_count 2
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket
+ transient_lastDdlTime 1261259090
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ name srcbucket
+ columns.types int:string
+ bucket_field_name key
+ serialization.ddl struct srcbucket { i32 key, string value}
+ columns key,value
+ serialization.format 1
+ bucket_count 2
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket
+ transient_lastDdlTime 1261259090
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: srcbucket
+ name: srcbucket
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+
+PREHOOK: query: SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 3 on key) s
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcbucket
+PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1309963706/10000
+POSTHOOK: query: SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 3 on key) s
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcbucket
+POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1309963706/10000
+474 val_475
+468 val_469
+246 val_247
+126 val_127
+402 val_403
+0 val_1
+240 val_241
+408 val_409
+48 val_49
+402 val_403
+468 val_469
+114 val_115
+114 val_115
+390 val_391
+264 val_265
+492 val_493
+438 val_439
+360 val_361
+222 val_223
+42 val_43
+492 val_493
+120 val_121
+306 val_307
+426 val_427
+132 val_133
+246 val_247
+438 val_439
+390 val_391
+342 val_343
+480 val_481
+102 val_103
+480 val_481
+318 val_319
+258 val_259
+174 val_175
+252 val_253
+114 val_115
+264 val_265
+48 val_49
+336 val_337
+390 val_391
+6 val_7
+450 val_451
+30 val_31
+42 val_43
+168 val_169
+384 val_385
+324 val_325
+384 val_385
+42 val_43
+132 val_133
+192 val_193
+330 val_331
+138 val_139
+174 val_175
+204 val_205
+126 val_127
+216 val_217
+450 val_451
+12 val_13
+384 val_385
+60 val_61
+300 val_301
+0 val_1
+156 val_157
+6 val_7
+462 val_463
+432 val_433
+144 val_145
+408 val_409
+348 val_349
+474 val_475
+486 val_487
+378 val_379
+414 val_415
+156 val_157
+228 val_229
+402 val_403
+162 val_163
+276 val_277
+78 val_79
+468 val_469
+138 val_139
+300 val_301
+150 val_150
+66 val_66
+252 val_252
+174 val_174
+174 val_174
+396 val_396
+162 val_162
+342 val_342
+0 val_0
+438 val_438
+378 val_378
+72 val_72
+498 val_498
+192 val_192
+54 val_54
+138 val_138
+216 val_216
+318 val_318
+180 val_180
+12 val_12
+384 val_384
+138 val_138
+84 val_84
+348 val_348
+348 val_348
+24 val_24
+42 val_42
+0 val_0
+468 val_468
+96 val_96
+120 val_120
+156 val_156
+468 val_468
+288 val_288
+282 val_282
+318 val_318
+318 val_318
+0 val_0
+282 val_282
+138 val_138
+72 val_72
+90 val_90
+306 val_306
+402 val_402
+396 val_396
+336 val_336
+168 val_168
+498 val_498
+42 val_42
+78 val_78
+492 val_492
+228 val_228
+138 val_138
+30 val_30
+468 val_468
+342 val_342
+216 val_216
+480 val_480
+288 val_288
+438 val_438
+432 val_432
+366 val_366
+114 val_114
+258 val_258
+90 val_90
+348 val_348
+12 val_12
+396 val_396
+480 val_480
+24 val_24
+438 val_438
+414 val_414
+360 val_360
+444 val_444
+120 val_120
+468 val_468
+480 val_480
+462 val_462
+384 val_384
+384 val_384
+18 val_18
+462 val_462
+492 val_492
+498 val_498
+186 val_186
+348 val_348
+18 val_18
+84 val_84
+348 val_348
+414 val_414
+222 val_222
+126 val_126
+90 val_90
+291 val_292
+135 val_136
+423 val_424
+177 val_178
+231 val_232
+447 val_448
+147 val_148
+249 val_250
+21 val_22
+273 val_274
+441 val_442
+153 val_154
+33 val_34
+243 val_244
+333 val_334
+15 val_16
+477 val_478
+99 val_100
+393 val_394
+123 val_124
+63 val_64
+135 val_136
+3 val_4
+165 val_166
+129 val_130
+453 val_454
+249 val_250
+411 val_412
+429 val_430
+147 val_148
+213 val_214
+393 val_394
+291 val_292
+375 val_376
+129 val_130
+369 val_370
+351 val_352
+183 val_184
+177 val_178
+399 val_400
+15 val_16
+87 val_88
+75 val_76
+189 val_190
+51 val_52
+363 val_364
+117 val_118
+405 val_406
+153 val_154
+21 val_22
+105 val_106
+375 val_376
+363 val_364
+381 val_382
+69 val_70
+303 val_304
+303 val_304
+93 val_94
+351 val_352
+399 val_400
+21 val_22
+429 val_430
+267 val_268
+375 val_376
+489 val_490
+105 val_106
+93 val_94
+375 val_376
+375 val_376
+435 val_436
+21 val_22
+93 val_94
+123 val_124
+87 val_88
+261 val_262
+495 val_496
+243 val_244
+363 val_364
+117 val_118
+459 val_460
+309 val_310
+135 val_136
+285 val_286
+27 val_27
+165 val_165
+255 val_255
+273 val_273
+369 val_369
+213 val_213
+429 val_429
+495 val_495
+327 val_327
+15 val_15
+417 val_417
+219 val_219
+153 val_153
+459 val_459
+237 val_237
+207 val_207
+399 val_399
+417 val_417
+489 val_489
+309 val_309
+195 val_195
+339 val_339
+57 val_57
+345 val_345
+129 val_129
+489 val_489
+111 val_111
+399 val_399
+459 val_459
+51 val_51
+213 val_213
+333 val_333
+489 val_489
+411 val_411
+129 val_129
+393 val_393
+165 val_165
+327 val_327
+51 val_51
+369 val_369
+87 val_87
+15 val_15
+435 val_435
+273 val_273
+309 val_309
+327 val_327
+369 val_369
+177 val_177
+195 val_195
+321 val_321
+489 val_489
+453 val_453
+69 val_69
+33 val_33
+219 val_219
+321 val_321
+483 val_483
+105 val_105
+201 val_201
+291 val_291
+351 val_351
+255 val_255
+237 val_237
+417 val_417
+429 val_429
+333 val_333
+207 val_207
+249 val_249
+189 val_189
+375 val_375
+9 val_9
+285 val_285
+273 val_273
+183 val_183
+315 val_315
+477 val_477
+PREHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 2 OUT OF 3 on key) s
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 2 OUT OF 3 on key) s
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket (TOK_TABLESAMPLE 2 3 (TOK_TABLE_OR_COL key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s)))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ s
+ TableScan
+ alias: s
+ Filter Operator
+ isSamplingPred: false
+ predicate:
+ expr: (((hash(key) & 2147483647) % 3) = 1)
+ type: boolean
+ Filter Operator
+ isSamplingPred: true
+ predicate:
+ expr: (((hash(key) & 2147483647) % 3) = 1)
+ type: boolean
+ Select Operator
+ expressions:
+ expr: key
+ type: int
+ expr: value
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/2097418985/10001
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0,_col1
+ serialization.format 1
+ columns.types int:string
+ Needs Tagging: false
+ Path -> Alias:
+ file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket [s]
+ Path -> Partition:
+ file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket
+ Partition
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ name srcbucket
+ columns.types int:string
+ bucket_field_name key
+ serialization.ddl struct srcbucket { i32 key, string value}
+ columns key,value
+ serialization.format 1
+ bucket_count 2
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket
+ transient_lastDdlTime 1261259090
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ name srcbucket
+ columns.types int:string
+ bucket_field_name key
+ serialization.ddl struct srcbucket { i32 key, string value}
+ columns key,value
+ serialization.format 1
+ bucket_count 2
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket
+ transient_lastDdlTime 1261259090
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: srcbucket
+ name: srcbucket
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+
+PREHOOK: query: SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 2 OUT OF 3 on key) s
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcbucket
+PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/825631526/10000
+POSTHOOK: query: SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 2 OUT OF 3 on key) s
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcbucket
+POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/825631526/10000
+448 val_449
+106 val_107
+490 val_491
+10 val_11
+226 val_227
+286 val_287
+424 val_425
+226 val_227
+94 val_95
+4 val_5
+304 val_305
+196 val_197
+238 val_239
+82 val_83
+58 val_59
+16 val_17
+376 val_377
+388 val_389
+184 val_185
+262 val_263
+352 val_353
+226 val_227
+328 val_329
+382 val_383
+340 val_341
+484 val_485
+118 val_119
+310 val_311
+358 val_359
+454 val_455
+364 val_365
+52 val_53
+40 val_41
+310 val_311
+226 val_227
+262 val_263
+328 val_329
+322 val_323
+76 val_77
+406 val_407
+58 val_59
+70 val_71
+352 val_353
+52 val_53
+160 val_161
+454 val_455
+76 val_77
+412 val_413
+16 val_17
+274 val_275
+196 val_197
+334 val_335
+442 val_443
+52 val_53
+268 val_269
+436 val_437
+118 val_119
+172 val_173
+244 val_245
+136 val_137
+496 val_497
+382 val_383
+292 val_293
+310 val_311
+52 val_53
+256 val_257
+292 val_293
+412 val_413
+40 val_41
+478 val_479
+178 val_179
+100 val_101
+22 val_23
+244 val_245
+46 val_47
+196 val_197
+82 val_83
+100 val_101
+430 val_431
+76 val_77
+478 val_479
+118 val_119
+178 val_179
+244 val_245
+238 val_238
+484 val_484
+406 val_406
+82 val_82
+166 val_166
+430 val_430
+292 val_292
+394 val_394
+466 val_466
+208 val_208
+316 val_316
+4 val_4
+280 val_280
+208 val_208
+382 val_382
+286 val_286
+430 val_430
+466 val_466
+58 val_58
+208 val_208
+172 val_172
+496 val_496
+322 val_322
+454 val_454
+100 val_100
+298 val_298
+418 val_418
+436 val_436
+196 val_196
+316 val_316
+490 val_490
+364 val_364
+118 val_118
+238 val_238
+118 val_118
+10 val_10
+226 val_226
+58 val_58
+34 val_34
+472 val_472
+322 val_322
+160 val_160
+430 val_430
+76 val_76
+64 val_64
+76 val_76
+274 val_274
+256 val_256
+70 val_70
+244 val_244
+202 val_202
+316 val_316
+280 val_280
+466 val_466
+406 val_406
+190 val_190
+406 val_406
+262 val_262
+424 val_424
+454 val_454
+478 val_478
+298 val_298
+424 val_424
+382 val_382
+70 val_70
+70 val_70
+478 val_478
+178 val_178
+310 val_310
+460 val_460
+136 val_136
+172 val_172
+214 val_214
+406 val_406
+454 val_454
+256 val_256
+100 val_100
+298 val_298
+28 val_28
+448 val_448
+400 val_400
+271 val_272
+217 val_218
+241 val_242
+31 val_32
+373 val_374
+175 val_176
+217 val_218
+421 val_422
+133 val_134
+391 val_392
+343 val_344
+241 val_242
+85 val_86
+367 val_368
+349 val_350
+409 val_410
+265 val_266
+277 val_278
+331 val_332
+235 val_236
+289 val_290
+421 val_422
+175 val_176
+427 val_428
+121 val_122
+349 val_350
+487 val_488
+331 val_332
+409 val_410
+475 val_476
+463 val_464
+367 val_368
+349 val_350
+355 val_356
+19 val_20
+121 val_122
+241 val_242
+49 val_50
+157 val_158
+199 val_200
+313 val_314
+385 val_386
+259 val_260
+241 val_242
+469 val_470
+481 val_482
+277 val_278
+451 val_452
+205 val_206
+295 val_296
+439 val_440
+457 val_458
+391 val_392
+151 val_152
+295 val_296
+61 val_62
+439 val_440
+349 val_350
+355 val_356
+427 val_428
+97 val_98
+385 val_386
+49 val_50
+259 val_260
+97 val_98
+151 val_152
+415 val_416
+379 val_380
+157 val_158
+337 val_338
+91 val_92
+1 val_2
+457 val_458
+409 val_409
+265 val_265
+193 val_193
+469 val_469
+145 val_145
+37 val_37
+277 val_277
+403 val_403
+193 val_193
+199 val_199
+247 val_247
+397 val_397
+439 val_439
+367 val_367
+325 val_325
+475 val_475
+205 val_205
+157 val_157
+427 val_427
+277 val_277
+169 val_169
+469 val_469
+187 val_187
+103 val_103
+289 val_289
+241 val_241
+181 val_181
+67 val_67
+373 val_373
+217 val_217
+463 val_463
+199 val_199
+205 val_205
+43 val_43
+469 val_469
+481 val_481
+457 val_457
+187 val_187
+409 val_409
+169 val_169
+85 val_85
+307 val_307
+19 val_19
+277 val_277
+331 val_331
+229 val_229
+223 val_223
+103 val_103
+367 val_367
+223 val_223
+487 val_487
+229 val_229
+469 val_469
+463 val_463
+283 val_283
+331 val_331
+235 val_235
+193 val_193
+175 val_175
+403 val_403
+409 val_409
+217 val_217
+187 val_187
+397 val_397
+163 val_163
+439 val_439
+199 val_199
+169 val_169
+325 val_325
+277 val_277
+493 val_493
+265 val_265
+133 val_133
+175 val_175
+421 val_421
+67 val_67
+379 val_379
+97 val_97
+469 val_469
+37 val_37
+307 val_307
+169 val_169
+403 val_403
+97 val_97
+PREHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket2 (TOK_TABLESAMPLE 1 2 (TOK_TABLE_OR_COL key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s)))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ s
+ TableScan
+ alias: s
+ Filter Operator
+ isSamplingPred: false
+ predicate:
+ expr: (((hash(key) & 2147483647) % 2) = 0)
+ type: boolean
+ Filter Operator
+ isSamplingPred: true
+ predicate:
+ expr: (((hash(key) & 2147483647) % 2) = 0)
+ type: boolean
+ Select Operator
+ expressions:
+ expr: key
+ type: int
+ expr: value
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/957552073/10001
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0,_col1
+ serialization.format 1
+ columns.types int:string
+ Needs Tagging: false
+ Path -> Alias:
+ file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket2/srcbucket20.txt [s]
+ file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket2/srcbucket22.txt [s]
+ Path -> Partition:
+ file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket2/srcbucket20.txt
+ Partition
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ name srcbucket2
+ columns.types int:string
+ bucket_field_name key
+ serialization.ddl struct srcbucket2 { i32 key, string value}
+ columns key,value
+ serialization.format 1
+ bucket_count 4
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket2
+ transient_lastDdlTime 1261259090
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ name srcbucket2
+ columns.types int:string
+ bucket_field_name key
+ serialization.ddl struct srcbucket2 { i32 key, string value}
+ columns key,value
+ serialization.format 1
+ bucket_count 4
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket2
+ transient_lastDdlTime 1261259090
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: srcbucket2
+ name: srcbucket2
+ file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket2/srcbucket22.txt
+ Partition
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ name srcbucket2
+ columns.types int:string
+ bucket_field_name key
+ serialization.ddl struct srcbucket2 { i32 key, string value}
+ columns key,value
+ serialization.format 1
+ bucket_count 4
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket2
+ transient_lastDdlTime 1261259090
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ name srcbucket2
+ columns.types int:string
+ bucket_field_name key
+ serialization.ddl struct srcbucket2 { i32 key, string value}
+ columns key,value
+ serialization.format 1
+ bucket_count 4
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket2
+ transient_lastDdlTime 1261259090
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: srcbucket2
+ name: srcbucket2
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+
+PREHOOK: query: SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcbucket2
+PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/650790737/10000
+POSTHOOK: query: SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcbucket2
+POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/650790737/10000
+484 val_484
+150 val_150
+224 val_224
+66 val_66
+374 val_374
+338 val_338
+466 val_466
+396 val_396
+0 val_0
+316 val_316
+378 val_378
+4 val_4
+356 val_356
+286 val_286
+176 val_176
+176 val_176
+260 val_260
+404 val_404
+84 val_84
+466 val_466
+8 val_8
+172 val_172
+158 val_158
+0 val_0
+26 val_26
+404 val_404
+282 val_282
+316 val_316
+0 val_0
+118 val_118
+282 val_282
+118 val_118
+224 val_224
+242 val_242
+392 val_392
+242 val_242
+396 val_396
+228 val_228
+480 val_480
+202 val_202
+316 val_316
+80 val_80
+44 val_44
+466 val_466
+190 val_190
+114 val_114
+396 val_396
+480 val_480
+444 val_444
+480 val_480
+136 val_136
+172 val_172
+462 val_462
+26 val_26
+462 val_462
+84 val_84
+448 val_448
+194 val_194
+400 val_400
+86 val_86
+406 val_406
+152 val_152
+82 val_82
+446 val_446
+394 val_394
+482 val_482
+174 val_174
+208 val_208
+174 val_174
+266 val_266
+170 val_170
+20 val_20
+280 val_280
+208 val_208
+192 val_192
+138 val_138
+318 val_318
+332 val_332
+284 val_284
+138 val_138
+208 val_208
+24 val_24
+42 val_42
+468 val_468
+156 val_156
+468 val_468
+196 val_196
+288 val_288
+318 val_318
+318 val_318
+134 val_134
+138 val_138
+226 val_226
+402 val_402
+336 val_336
+42 val_42
+138 val_138
+64 val_64
+468 val_468
+116 val_116
+288 val_288
+244 val_244
+280 val_280
+2 val_2
+406 val_406
+406 val_406
+262 val_262
+424 val_424
+424 val_424
+24 val_24
+200 val_200
+248 val_248
+178 val_178
+468 val_468
+310 val_310
+460 val_460
+406 val_406
+134 val_134
+28 val_28
+152 val_152
+222 val_222
+200 val_200
+PREHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 2 OUT OF 4 on key) s
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 2 OUT OF 4 on key) s
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF srcbucket2 (TOK_TABLESAMPLE 2 4 (TOK_TABLE_OR_COL key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF s)))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ s
+ TableScan
+ alias: s
+ Filter Operator
+ isSamplingPred: false
+ predicate:
+ expr: (((hash(key) & 2147483647) % 4) = 1)
+ type: boolean
+ Filter Operator
+ isSamplingPred: true
+ predicate:
+ expr: (((hash(key) & 2147483647) % 4) = 1)
+ type: boolean
+ Select Operator
+ expressions:
+ expr: key
+ type: int
+ expr: value
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ directory: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/264562515/10001
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ columns _col0,_col1
+ serialization.format 1
+ columns.types int:string
+ Needs Tagging: false
+ Path -> Alias:
+ file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket2/srcbucket21.txt [s]
+ Path -> Partition:
+ file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket2/srcbucket21.txt
+ Partition
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ name srcbucket2
+ columns.types int:string
+ bucket_field_name key
+ serialization.ddl struct srcbucket2 { i32 key, string value}
+ columns key,value
+ serialization.format 1
+ bucket_count 4
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket2
+ transient_lastDdlTime 1261259090
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ name srcbucket2
+ columns.types int:string
+ bucket_field_name key
+ serialization.ddl struct srcbucket2 { i32 key, string value}
+ columns key,value
+ serialization.format 1
+ bucket_count 4
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket2
+ transient_lastDdlTime 1261259090
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: srcbucket2
+ name: srcbucket2
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+
+PREHOOK: query: SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 2 OUT OF 4 on key) s
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcbucket2
+PREHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1511867857/10000
+POSTHOOK: query: SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 2 OUT OF 4 on key) s
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcbucket2
+POSTHOOK: Output: file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1511867857/10000
+409 val_409
+265 val_265
+401 val_401
+489 val_489
+397 val_397
+489 val_489
+221 val_221
+221 val_221
+137 val_137
+489 val_489
+353 val_353
+393 val_393
+481 val_481
+409 val_409
+137 val_137
+85 val_85
+401 val_401
+177 val_177
+5 val_5
+317 val_317
+229 val_229
+489 val_489
+41 val_41
+449 val_449
+485 val_485
+401 val_401
+5 val_5
+229 val_229
+409 val_409
+401 val_401
+5 val_5
+397 val_397
+317 val_317
+265 val_265
+353 val_353
+133 val_133
+401 val_401
+9 val_9
+PREHOOK: query: drop table dest1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table dest1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@dest1
Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample6.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample6.q.xml?rev=892539&r1=892538&r2=892539&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample6.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample6.q.xml Sun Dec 20 00:04:29 2009
@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
-<java version="1.6.0_07" class="java.beans.XMLDecoder">
+<java version="1.6.0_17" class="java.beans.XMLDecoder">
<object id="MapRedTask0" class="org.apache.hadoop.hive.ql.exec.MapRedTask">
<void property="childTasks">
<object class="java.util.ArrayList">
@@ -30,7 +30,7 @@
<boolean>true</boolean>
</void>
<void property="sourceDir">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1437829711/10000</string>
+ <string>file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1021083274/10000</string>
</void>
<void property="table">
<object id="tableDesc0" class="org.apache.hadoop.hive.ql.plan.tableDesc">
@@ -83,11 +83,11 @@
</void>
<void method="put">
<string>location</string>
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/dest1</string>
+ <string>file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/dest1</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1259648355</string>
+ <string>1261259928</string>
</void>
</object>
</void>
@@ -97,7 +97,7 @@
</object>
</void>
<void property="tmpDir">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1437829711/10001</string>
+ <string>file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1021083274/10001</string>
</void>
</object>
</void>
@@ -125,10 +125,10 @@
<boolean>true</boolean>
</void>
<void property="sourceDir">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/686111447/10002</string>
+ <string>file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/661511499/10002</string>
</void>
<void property="targetDir">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1437829711/10000</string>
+ <string>file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1021083274/10000</string>
</void>
</object>
</void>
@@ -146,7 +146,7 @@
<void property="aliasToWork">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/686111447/10002</string>
+ <string>file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/661511499/10002</string>
<object id="TableScanOperator0" class="org.apache.hadoop.hive.ql.exec.TableScanOperator">
<void property="childOperators">
<object class="java.util.ArrayList">
@@ -385,10 +385,10 @@
<void property="pathToAliases">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/686111447/10002</string>
+ <string>file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/661511499/10002</string>
<object class="java.util.ArrayList">
<void method="add">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/686111447/10002</string>
+ <string>file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/661511499/10002</string>
</void>
</object>
</void>
@@ -397,7 +397,7 @@
<void property="pathToPartitionInfo">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/686111447/10002</string>
+ <string>file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/661511499/10002</string>
<object class="org.apache.hadoop.hive.ql.plan.partitionDesc">
<void property="deserializerClass">
<class>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</class>
@@ -430,7 +430,7 @@
<void property="conf">
<object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc">
<void property="dirName">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1437829711/10000</string>
+ <string>file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/1021083274/10000</string>
</void>
<void property="tableInfo">
<object class="org.apache.hadoop.hive.ql.plan.tableDesc">
@@ -479,7 +479,7 @@
</void>
<void method="put">
<string>location</string>
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/dest1</string>
+ <string>file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/dest1</string>
</void>
<void method="put">
<string>file.outputformat</string>
@@ -487,7 +487,7 @@
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1259648355</string>
+ <string>1261259928</string>
</void>
</object>
</void>
@@ -621,7 +621,7 @@
<void property="resolverCtx">
<object class="org.apache.hadoop.hive.ql.plan.ConditionalResolverMergeFiles$ConditionalResolverMergeFilesCtx">
<void property="dir">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/686111447/10002</string>
+ <string>file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/661511499/10002</string>
</void>
<void property="listTasks">
<object idref="ArrayList0"/>
@@ -711,11 +711,11 @@
</void>
<void method="put">
<string>location</string>
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/srcbucket</string>
+ <string>file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket</string>
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1259648354</string>
+ <string>1261259927</string>
</void>
</object>
</void>
@@ -771,7 +771,7 @@
<int>1</int>
</void>
<void property="dirName">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/686111447/10002</string>
+ <string>file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/tmp/661511499/10002</string>
</void>
<void property="tableInfo">
<object idref="tableDesc0"/>
@@ -1060,6 +1060,9 @@
</void>
</object>
</void>
+ <void property="sampleDescr">
+ <object class="org.apache.hadoop.hive.ql.plan.filterDesc$sampleDesc"/>
+ </void>
</object>
</void>
<void property="counterNames">
@@ -1324,7 +1327,7 @@
<void property="pathToAliases">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt</string>
+ <string>file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt</string>
<object class="java.util.ArrayList">
<void method="add">
<string>s</string>
@@ -1336,7 +1339,7 @@
<void property="pathToPartitionInfo">
<object class="java.util.LinkedHashMap">
<void method="put">
- <string>file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt</string>
+ <string>file:/Users/heyongqiang/Documents/workspace/Hive-Test/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt</string>
<object class="org.apache.hadoop.hive.ql.plan.partitionDesc">
<void property="deserializerClass">
<class>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</class>