Posted to commits@hive.apache.org by pr...@apache.org on 2016/08/24 20:57:31 UTC

[37/51] [partial] hive git commit: HIVE-14553: Remove tez golden files after HIVE-14502 (Prasanth Jayachandran reviewed by Siddharth Seth)
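
For reference, each removed golden file is the expected output of a bucketing qfile test. Reconstructed from the PREHOOK/POSTHOOK query lines in the diff below, bucket2.q runs roughly the following statements (semicolons added here for readability; bucket3.q and bucket4.q are the partitioned and sorted-bucket variants):

    -- SORT_QUERY_RESULTS
    CREATE TABLE bucket2_1(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS;
    explain extended insert overwrite table bucket2_1 select * from src;
    insert overwrite table bucket2_1 select * from src;
    -- bucket sampling appears in the fetch plan as the filter
    -- (((hash(key) & 2147483647) % 2) = 0)
    explain select * from bucket2_1 tablesample (bucket 1 out of 2) s;
    select * from bucket2_1 tablesample (bucket 1 out of 2) s;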

http://git-wip-us.apache.org/repos/asf/hive/blob/975a49b6/ql/src/test/results/clientpositive/tez/bucket2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/bucket2.q.out b/ql/src/test/results/clientpositive/tez/bucket2.q.out
deleted file mode 100644
index bcf9ab9..0000000
--- a/ql/src/test/results/clientpositive/tez/bucket2.q.out
+++ /dev/null
@@ -1,466 +0,0 @@
-PREHOOK: query: -- SORT_QUERY_RESULTS
-
-CREATE TABLE bucket2_1(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@bucket2_1
-POSTHOOK: query: -- SORT_QUERY_RESULTS
-
-CREATE TABLE bucket2_1(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@bucket2_1
-PREHOOK: query: explain extended
-insert overwrite table bucket2_1
-select * from src
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-insert overwrite table bucket2_1
-select * from src
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                  GatherStats: false
-                  Select Operator
-                    expressions: key (type: string), value (type: string)
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      null sort order: 
-                      sort order: 
-                      Map-reduce partition columns: UDFToInteger(_col0) (type: int)
-                      Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                      tag: -1
-                      value expressions: _col0 (type: string), _col1 (type: string)
-                      auto parallelism: false
-            Path -> Alias:
-#### A masked pattern was here ####
-            Path -> Partition:
-#### A masked pattern was here ####
-                Partition
-                  base file name: src
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  properties:
-                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.src
-                    numFiles 1
-                    numRows 500
-                    rawDataSize 5312
-                    serialization.ddl struct src { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.src
-                      numFiles 1
-                      numRows 500
-                      rawDataSize 5312
-                      serialization.ddl struct src { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      totalSize 5812
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.src
-                  name: default.src
-            Truncated Path -> Alias:
-              /src [src]
-        Reducer 2 
-            Needs Tagging: false
-            Reduce Operator Tree:
-              Select Operator
-                expressions: UDFToInteger(VALUE._col0) (type: int), VALUE._col1 (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 1
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 2
-                  Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-                        bucket_count 2
-                        bucket_field_name key
-                        columns key,value
-                        columns.comments 
-                        columns.types int:string
-#### A masked pattern was here ####
-                        name default.bucket2_1
-                        numFiles 0
-                        numRows 0
-                        rawDataSize 0
-                        serialization.ddl struct bucket2_1 { i32 key, string value}
-                        serialization.format 1
-                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        totalSize 0
-#### A masked pattern was here ####
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.bucket2_1
-                  TotalFiles: 2
-                  GatherStats: true
-                  MultiFileSpray: true
-
-  Stage: Stage-2
-    Dependency Collection
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-                bucket_count 2
-                bucket_field_name key
-                columns key,value
-                columns.comments 
-                columns.types int:string
-#### A masked pattern was here ####
-                name default.bucket2_1
-                numFiles 0
-                numRows 0
-                rawDataSize 0
-                serialization.ddl struct bucket2_1 { i32 key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 0
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.bucket2_1
-
-  Stage: Stage-3
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table bucket2_1
-select * from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@bucket2_1
-POSTHOOK: query: insert overwrite table bucket2_1
-select * from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@bucket2_1
-POSTHOOK: Lineage: bucket2_1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: bucket2_1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: explain
-select * from bucket2_1 tablesample (bucket 1 out of 2) s
-PREHOOK: type: QUERY
-POSTHOOK: query: explain
-select * from bucket2_1 tablesample (bucket 1 out of 2) s
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        TableScan
-          alias: s
-          Filter Operator
-            predicate: (((hash(key) & 2147483647) % 2) = 0) (type: boolean)
-            Select Operator
-              expressions: key (type: int), value (type: string)
-              outputColumnNames: _col0, _col1
-              ListSink
-
-PREHOOK: query: select * from bucket2_1 tablesample (bucket 1 out of 2) s
-PREHOOK: type: QUERY
-PREHOOK: Input: default@bucket2_1
-#### A masked pattern was here ####
-POSTHOOK: query: select * from bucket2_1 tablesample (bucket 1 out of 2) s
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@bucket2_1
-#### A masked pattern was here ####
-0	val_0
-0	val_0
-0	val_0
-10	val_10
-100	val_100
-100	val_100
-104	val_104
-104	val_104
-114	val_114
-116	val_116
-118	val_118
-118	val_118
-12	val_12
-12	val_12
-120	val_120
-120	val_120
-126	val_126
-128	val_128
-128	val_128
-128	val_128
-134	val_134
-134	val_134
-136	val_136
-138	val_138
-138	val_138
-138	val_138
-138	val_138
-146	val_146
-146	val_146
-150	val_150
-152	val_152
-152	val_152
-156	val_156
-158	val_158
-160	val_160
-162	val_162
-164	val_164
-164	val_164
-166	val_166
-168	val_168
-170	val_170
-172	val_172
-172	val_172
-174	val_174
-174	val_174
-176	val_176
-176	val_176
-178	val_178
-18	val_18
-18	val_18
-180	val_180
-186	val_186
-190	val_190
-192	val_192
-194	val_194
-196	val_196
-2	val_2
-20	val_20
-200	val_200
-200	val_200
-202	val_202
-208	val_208
-208	val_208
-208	val_208
-214	val_214
-216	val_216
-216	val_216
-218	val_218
-222	val_222
-224	val_224
-224	val_224
-226	val_226
-228	val_228
-230	val_230
-230	val_230
-230	val_230
-230	val_230
-230	val_230
-238	val_238
-238	val_238
-24	val_24
-24	val_24
-242	val_242
-242	val_242
-244	val_244
-248	val_248
-252	val_252
-256	val_256
-256	val_256
-258	val_258
-26	val_26
-26	val_26
-260	val_260
-262	val_262
-266	val_266
-272	val_272
-272	val_272
-274	val_274
-278	val_278
-278	val_278
-28	val_28
-280	val_280
-280	val_280
-282	val_282
-282	val_282
-284	val_284
-286	val_286
-288	val_288
-288	val_288
-292	val_292
-296	val_296
-298	val_298
-298	val_298
-298	val_298
-30	val_30
-302	val_302
-306	val_306
-308	val_308
-310	val_310
-316	val_316
-316	val_316
-316	val_316
-318	val_318
-318	val_318
-318	val_318
-322	val_322
-322	val_322
-332	val_332
-336	val_336
-338	val_338
-34	val_34
-342	val_342
-342	val_342
-344	val_344
-344	val_344
-348	val_348
-348	val_348
-348	val_348
-348	val_348
-348	val_348
-356	val_356
-360	val_360
-362	val_362
-364	val_364
-366	val_366
-368	val_368
-374	val_374
-378	val_378
-382	val_382
-382	val_382
-384	val_384
-384	val_384
-384	val_384
-386	val_386
-392	val_392
-394	val_394
-396	val_396
-396	val_396
-396	val_396
-4	val_4
-400	val_400
-402	val_402
-404	val_404
-404	val_404
-406	val_406
-406	val_406
-406	val_406
-406	val_406
-414	val_414
-414	val_414
-418	val_418
-42	val_42
-42	val_42
-424	val_424
-424	val_424
-430	val_430
-430	val_430
-430	val_430
-432	val_432
-436	val_436
-438	val_438
-438	val_438
-438	val_438
-44	val_44
-444	val_444
-446	val_446
-448	val_448
-452	val_452
-454	val_454
-454	val_454
-454	val_454
-458	val_458
-458	val_458
-460	val_460
-462	val_462
-462	val_462
-466	val_466
-466	val_466
-466	val_466
-468	val_468
-468	val_468
-468	val_468
-468	val_468
-470	val_470
-472	val_472
-478	val_478
-478	val_478
-480	val_480
-480	val_480
-480	val_480
-482	val_482
-484	val_484
-490	val_490
-492	val_492
-492	val_492
-494	val_494
-496	val_496
-498	val_498
-498	val_498
-498	val_498
-54	val_54
-58	val_58
-58	val_58
-64	val_64
-66	val_66
-70	val_70
-70	val_70
-70	val_70
-72	val_72
-72	val_72
-74	val_74
-76	val_76
-76	val_76
-78	val_78
-8	val_8
-80	val_80
-82	val_82
-84	val_84
-84	val_84
-86	val_86
-90	val_90
-90	val_90
-90	val_90
-92	val_92
-96	val_96
-98	val_98
-98	val_98

http://git-wip-us.apache.org/repos/asf/hive/blob/975a49b6/ql/src/test/results/clientpositive/tez/bucket3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/bucket3.q.out b/ql/src/test/results/clientpositive/tez/bucket3.q.out
deleted file mode 100644
index eabcea0..0000000
--- a/ql/src/test/results/clientpositive/tez/bucket3.q.out
+++ /dev/null
@@ -1,477 +0,0 @@
-PREHOOK: query: -- SORT_QUERY_RESULTS
-
-CREATE TABLE bucket3_1(key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@bucket3_1
-POSTHOOK: query: -- SORT_QUERY_RESULTS
-
-CREATE TABLE bucket3_1(key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@bucket3_1
-PREHOOK: query: explain extended
-insert overwrite table bucket3_1 partition (ds='1')
-select * from src
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-insert overwrite table bucket3_1 partition (ds='1')
-select * from src
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                  GatherStats: false
-                  Select Operator
-                    expressions: key (type: string), value (type: string)
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      null sort order: 
-                      sort order: 
-                      Map-reduce partition columns: UDFToInteger(_col0) (type: int)
-                      Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                      tag: -1
-                      value expressions: _col0 (type: string), _col1 (type: string)
-                      auto parallelism: false
-            Path -> Alias:
-#### A masked pattern was here ####
-            Path -> Partition:
-#### A masked pattern was here ####
-                Partition
-                  base file name: src
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  properties:
-                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.src
-                    numFiles 1
-                    numRows 500
-                    rawDataSize 5312
-                    serialization.ddl struct src { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.src
-                      numFiles 1
-                      numRows 500
-                      rawDataSize 5312
-                      serialization.ddl struct src { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      totalSize 5812
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.src
-                  name: default.src
-            Truncated Path -> Alias:
-              /src [src]
-        Reducer 2 
-            Needs Tagging: false
-            Reduce Operator Tree:
-              Select Operator
-                expressions: UDFToInteger(VALUE._col0) (type: int), VALUE._col1 (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 1
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 2
-                  Static Partition Specification: ds=1/
-                  Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        bucket_count 2
-                        bucket_field_name key
-                        columns key,value
-                        columns.comments 
-                        columns.types int:string
-#### A masked pattern was here ####
-                        name default.bucket3_1
-                        partition_columns ds
-                        partition_columns.types string
-                        serialization.ddl struct bucket3_1 { i32 key, string value}
-                        serialization.format 1
-                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.bucket3_1
-                  TotalFiles: 2
-                  GatherStats: true
-                  MultiFileSpray: true
-
-  Stage: Stage-2
-    Dependency Collection
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 1
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count 2
-                bucket_field_name key
-                columns key,value
-                columns.comments 
-                columns.types int:string
-#### A masked pattern was here ####
-                name default.bucket3_1
-                partition_columns ds
-                partition_columns.types string
-                serialization.ddl struct bucket3_1 { i32 key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.bucket3_1
-
-  Stage: Stage-3
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table bucket3_1 partition (ds='1')
-select * from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@bucket3_1@ds=1
-POSTHOOK: query: insert overwrite table bucket3_1 partition (ds='1')
-select * from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@bucket3_1@ds=1
-POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: insert overwrite table bucket3_1 partition (ds='2')
-select * from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@bucket3_1@ds=2
-POSTHOOK: query: insert overwrite table bucket3_1 partition (ds='2')
-select * from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@bucket3_1@ds=2
-POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: explain
-select * from bucket3_1 tablesample (bucket 1 out of 2) s where ds = '1'
-PREHOOK: type: QUERY
-POSTHOOK: query: explain
-select * from bucket3_1 tablesample (bucket 1 out of 2) s where ds = '1'
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        TableScan
-          alias: s
-          Filter Operator
-            predicate: (((hash(key) & 2147483647) % 2) = 0) (type: boolean)
-            Select Operator
-              expressions: key (type: int), value (type: string), '1' (type: string)
-              outputColumnNames: _col0, _col1, _col2
-              ListSink
-
-PREHOOK: query: select * from bucket3_1 tablesample (bucket 1 out of 2) s where ds = '1'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@bucket3_1
-PREHOOK: Input: default@bucket3_1@ds=1
-#### A masked pattern was here ####
-POSTHOOK: query: select * from bucket3_1 tablesample (bucket 1 out of 2) s where ds = '1'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@bucket3_1
-POSTHOOK: Input: default@bucket3_1@ds=1
-#### A masked pattern was here ####
-0	val_0	1
-0	val_0	1
-0	val_0	1
-10	val_10	1
-100	val_100	1
-100	val_100	1
-104	val_104	1
-104	val_104	1
-114	val_114	1
-116	val_116	1
-118	val_118	1
-118	val_118	1
-12	val_12	1
-12	val_12	1
-120	val_120	1
-120	val_120	1
-126	val_126	1
-128	val_128	1
-128	val_128	1
-128	val_128	1
-134	val_134	1
-134	val_134	1
-136	val_136	1
-138	val_138	1
-138	val_138	1
-138	val_138	1
-138	val_138	1
-146	val_146	1
-146	val_146	1
-150	val_150	1
-152	val_152	1
-152	val_152	1
-156	val_156	1
-158	val_158	1
-160	val_160	1
-162	val_162	1
-164	val_164	1
-164	val_164	1
-166	val_166	1
-168	val_168	1
-170	val_170	1
-172	val_172	1
-172	val_172	1
-174	val_174	1
-174	val_174	1
-176	val_176	1
-176	val_176	1
-178	val_178	1
-18	val_18	1
-18	val_18	1
-180	val_180	1
-186	val_186	1
-190	val_190	1
-192	val_192	1
-194	val_194	1
-196	val_196	1
-2	val_2	1
-20	val_20	1
-200	val_200	1
-200	val_200	1
-202	val_202	1
-208	val_208	1
-208	val_208	1
-208	val_208	1
-214	val_214	1
-216	val_216	1
-216	val_216	1
-218	val_218	1
-222	val_222	1
-224	val_224	1
-224	val_224	1
-226	val_226	1
-228	val_228	1
-230	val_230	1
-230	val_230	1
-230	val_230	1
-230	val_230	1
-230	val_230	1
-238	val_238	1
-238	val_238	1
-24	val_24	1
-24	val_24	1
-242	val_242	1
-242	val_242	1
-244	val_244	1
-248	val_248	1
-252	val_252	1
-256	val_256	1
-256	val_256	1
-258	val_258	1
-26	val_26	1
-26	val_26	1
-260	val_260	1
-262	val_262	1
-266	val_266	1
-272	val_272	1
-272	val_272	1
-274	val_274	1
-278	val_278	1
-278	val_278	1
-28	val_28	1
-280	val_280	1
-280	val_280	1
-282	val_282	1
-282	val_282	1
-284	val_284	1
-286	val_286	1
-288	val_288	1
-288	val_288	1
-292	val_292	1
-296	val_296	1
-298	val_298	1
-298	val_298	1
-298	val_298	1
-30	val_30	1
-302	val_302	1
-306	val_306	1
-308	val_308	1
-310	val_310	1
-316	val_316	1
-316	val_316	1
-316	val_316	1
-318	val_318	1
-318	val_318	1
-318	val_318	1
-322	val_322	1
-322	val_322	1
-332	val_332	1
-336	val_336	1
-338	val_338	1
-34	val_34	1
-342	val_342	1
-342	val_342	1
-344	val_344	1
-344	val_344	1
-348	val_348	1
-348	val_348	1
-348	val_348	1
-348	val_348	1
-348	val_348	1
-356	val_356	1
-360	val_360	1
-362	val_362	1
-364	val_364	1
-366	val_366	1
-368	val_368	1
-374	val_374	1
-378	val_378	1
-382	val_382	1
-382	val_382	1
-384	val_384	1
-384	val_384	1
-384	val_384	1
-386	val_386	1
-392	val_392	1
-394	val_394	1
-396	val_396	1
-396	val_396	1
-396	val_396	1
-4	val_4	1
-400	val_400	1
-402	val_402	1
-404	val_404	1
-404	val_404	1
-406	val_406	1
-406	val_406	1
-406	val_406	1
-406	val_406	1
-414	val_414	1
-414	val_414	1
-418	val_418	1
-42	val_42	1
-42	val_42	1
-424	val_424	1
-424	val_424	1
-430	val_430	1
-430	val_430	1
-430	val_430	1
-432	val_432	1
-436	val_436	1
-438	val_438	1
-438	val_438	1
-438	val_438	1
-44	val_44	1
-444	val_444	1
-446	val_446	1
-448	val_448	1
-452	val_452	1
-454	val_454	1
-454	val_454	1
-454	val_454	1
-458	val_458	1
-458	val_458	1
-460	val_460	1
-462	val_462	1
-462	val_462	1
-466	val_466	1
-466	val_466	1
-466	val_466	1
-468	val_468	1
-468	val_468	1
-468	val_468	1
-468	val_468	1
-470	val_470	1
-472	val_472	1
-478	val_478	1
-478	val_478	1
-480	val_480	1
-480	val_480	1
-480	val_480	1
-482	val_482	1
-484	val_484	1
-490	val_490	1
-492	val_492	1
-492	val_492	1
-494	val_494	1
-496	val_496	1
-498	val_498	1
-498	val_498	1
-498	val_498	1
-54	val_54	1
-58	val_58	1
-58	val_58	1
-64	val_64	1
-66	val_66	1
-70	val_70	1
-70	val_70	1
-70	val_70	1
-72	val_72	1
-72	val_72	1
-74	val_74	1
-76	val_76	1
-76	val_76	1
-78	val_78	1
-8	val_8	1
-80	val_80	1
-82	val_82	1
-84	val_84	1
-84	val_84	1
-86	val_86	1
-90	val_90	1
-90	val_90	1
-90	val_90	1
-92	val_92	1
-96	val_96	1
-98	val_98	1
-98	val_98	1

http://git-wip-us.apache.org/repos/asf/hive/blob/975a49b6/ql/src/test/results/clientpositive/tez/bucket4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/bucket4.q.out b/ql/src/test/results/clientpositive/tez/bucket4.q.out
deleted file mode 100644
index f029dfd..0000000
--- a/ql/src/test/results/clientpositive/tez/bucket4.q.out
+++ /dev/null
@@ -1,465 +0,0 @@
-PREHOOK: query: CREATE TABLE bucket4_1(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@bucket4_1
-POSTHOOK: query: CREATE TABLE bucket4_1(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@bucket4_1
-PREHOOK: query: explain extended
-insert overwrite table bucket4_1
-select * from src
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-insert overwrite table bucket4_1
-select * from src
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                  GatherStats: false
-                  Select Operator
-                    expressions: key (type: string), value (type: string)
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      key expressions: UDFToInteger(_col0) (type: int)
-                      null sort order: a
-                      sort order: +
-                      Map-reduce partition columns: UDFToInteger(_col0) (type: int)
-                      Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
-                      tag: -1
-                      value expressions: _col0 (type: string), _col1 (type: string)
-                      auto parallelism: false
-            Path -> Alias:
-#### A masked pattern was here ####
-            Path -> Partition:
-#### A masked pattern was here ####
-                Partition
-                  base file name: src
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  properties:
-                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.src
-                    numFiles 1
-                    numRows 500
-                    rawDataSize 5312
-                    serialization.ddl struct src { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.src
-                      numFiles 1
-                      numRows 500
-                      rawDataSize 5312
-                      serialization.ddl struct src { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      totalSize 5812
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.src
-                  name: default.src
-            Truncated Path -> Alias:
-              /src [src]
-        Reducer 2 
-            Needs Tagging: false
-            Reduce Operator Tree:
-              Select Operator
-                expressions: UDFToInteger(VALUE._col0) (type: int), VALUE._col1 (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 1
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 2
-                  Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-                        SORTBUCKETCOLSPREFIX TRUE
-                        bucket_count 2
-                        bucket_field_name key
-                        columns key,value
-                        columns.comments 
-                        columns.types int:string
-#### A masked pattern was here ####
-                        name default.bucket4_1
-                        numFiles 0
-                        numRows 0
-                        rawDataSize 0
-                        serialization.ddl struct bucket4_1 { i32 key, string value}
-                        serialization.format 1
-                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        totalSize 0
-#### A masked pattern was here ####
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.bucket4_1
-                  TotalFiles: 2
-                  GatherStats: true
-                  MultiFileSpray: true
-
-  Stage: Stage-2
-    Dependency Collection
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-                SORTBUCKETCOLSPREFIX TRUE
-                bucket_count 2
-                bucket_field_name key
-                columns key,value
-                columns.comments 
-                columns.types int:string
-#### A masked pattern was here ####
-                name default.bucket4_1
-                numFiles 0
-                numRows 0
-                rawDataSize 0
-                serialization.ddl struct bucket4_1 { i32 key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 0
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.bucket4_1
-
-  Stage: Stage-3
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table bucket4_1
-select * from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@bucket4_1
-POSTHOOK: query: insert overwrite table bucket4_1
-select * from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@bucket4_1
-POSTHOOK: Lineage: bucket4_1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: bucket4_1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: explain
-select * from bucket4_1 tablesample (bucket 1 out of 2) s
-PREHOOK: type: QUERY
-POSTHOOK: query: explain
-select * from bucket4_1 tablesample (bucket 1 out of 2) s
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        TableScan
-          alias: s
-          Filter Operator
-            predicate: (((hash(key) & 2147483647) % 2) = 0) (type: boolean)
-            Select Operator
-              expressions: key (type: int), value (type: string)
-              outputColumnNames: _col0, _col1
-              ListSink
-
-PREHOOK: query: select * from bucket4_1 tablesample (bucket 1 out of 2) s
-PREHOOK: type: QUERY
-PREHOOK: Input: default@bucket4_1
-#### A masked pattern was here ####
-POSTHOOK: query: select * from bucket4_1 tablesample (bucket 1 out of 2) s
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@bucket4_1
-#### A masked pattern was here ####
-0	val_0
-0	val_0
-0	val_0
-2	val_2
-4	val_4
-8	val_8
-10	val_10
-12	val_12
-12	val_12
-18	val_18
-18	val_18
-20	val_20
-24	val_24
-24	val_24
-26	val_26
-26	val_26
-28	val_28
-30	val_30
-34	val_34
-42	val_42
-42	val_42
-44	val_44
-54	val_54
-58	val_58
-58	val_58
-64	val_64
-66	val_66
-70	val_70
-70	val_70
-70	val_70
-72	val_72
-72	val_72
-74	val_74
-76	val_76
-76	val_76
-78	val_78
-80	val_80
-82	val_82
-84	val_84
-84	val_84
-86	val_86
-90	val_90
-90	val_90
-90	val_90
-92	val_92
-96	val_96
-98	val_98
-98	val_98
-100	val_100
-100	val_100
-104	val_104
-104	val_104
-114	val_114
-116	val_116
-118	val_118
-118	val_118
-120	val_120
-120	val_120
-126	val_126
-128	val_128
-128	val_128
-128	val_128
-134	val_134
-134	val_134
-136	val_136
-138	val_138
-138	val_138
-138	val_138
-138	val_138
-146	val_146
-146	val_146
-150	val_150
-152	val_152
-152	val_152
-156	val_156
-158	val_158
-160	val_160
-162	val_162
-164	val_164
-164	val_164
-166	val_166
-168	val_168
-170	val_170
-172	val_172
-172	val_172
-174	val_174
-174	val_174
-176	val_176
-176	val_176
-178	val_178
-180	val_180
-186	val_186
-190	val_190
-192	val_192
-194	val_194
-196	val_196
-200	val_200
-200	val_200
-202	val_202
-208	val_208
-208	val_208
-208	val_208
-214	val_214
-216	val_216
-216	val_216
-218	val_218
-222	val_222
-224	val_224
-224	val_224
-226	val_226
-228	val_228
-230	val_230
-230	val_230
-230	val_230
-230	val_230
-230	val_230
-238	val_238
-238	val_238
-242	val_242
-242	val_242
-244	val_244
-248	val_248
-252	val_252
-256	val_256
-256	val_256
-258	val_258
-260	val_260
-262	val_262
-266	val_266
-272	val_272
-272	val_272
-274	val_274
-278	val_278
-278	val_278
-280	val_280
-280	val_280
-282	val_282
-282	val_282
-284	val_284
-286	val_286
-288	val_288
-288	val_288
-292	val_292
-296	val_296
-298	val_298
-298	val_298
-298	val_298
-302	val_302
-306	val_306
-308	val_308
-310	val_310
-316	val_316
-316	val_316
-316	val_316
-318	val_318
-318	val_318
-318	val_318
-322	val_322
-322	val_322
-332	val_332
-336	val_336
-338	val_338
-342	val_342
-342	val_342
-344	val_344
-344	val_344
-348	val_348
-348	val_348
-348	val_348
-348	val_348
-348	val_348
-356	val_356
-360	val_360
-362	val_362
-364	val_364
-366	val_366
-368	val_368
-374	val_374
-378	val_378
-382	val_382
-382	val_382
-384	val_384
-384	val_384
-384	val_384
-386	val_386
-392	val_392
-394	val_394
-396	val_396
-396	val_396
-396	val_396
-400	val_400
-402	val_402
-404	val_404
-404	val_404
-406	val_406
-406	val_406
-406	val_406
-406	val_406
-414	val_414
-414	val_414
-418	val_418
-424	val_424
-424	val_424
-430	val_430
-430	val_430
-430	val_430
-432	val_432
-436	val_436
-438	val_438
-438	val_438
-438	val_438
-444	val_444
-446	val_446
-448	val_448
-452	val_452
-454	val_454
-454	val_454
-454	val_454
-458	val_458
-458	val_458
-460	val_460
-462	val_462
-462	val_462
-466	val_466
-466	val_466
-466	val_466
-468	val_468
-468	val_468
-468	val_468
-468	val_468
-470	val_470
-472	val_472
-478	val_478
-478	val_478
-480	val_480
-480	val_480
-480	val_480
-482	val_482
-484	val_484
-490	val_490
-492	val_492
-492	val_492
-494	val_494
-496	val_496
-498	val_498
-498	val_498
-498	val_498