You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@bigtop.apache.org by co...@apache.org on 2013/04/24 22:31:02 UTC
[1/5] BIGTOP-885. TestHiveSmokeBulk fails on Hive 0.9
Updated Branches:
refs/heads/master 7b2f77dd2 -> 8d32a92d0
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/stats8/out
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/stats8/out b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/stats8/out
deleted file mode 100644
index ae4bf59..0000000
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/stats8/out
+++ /dev/null
@@ -1,224 +0,0 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
-set datanucleus.cache.collections=false
-set hive.stats.autogather=false
-set hive.exec.dynamic.partition=true
-set hive.exec.dynamic.partition.mode=nonstrict
-
-
-create table analyze_srcpart like srcpart
-
-insert overwrite table analyze_srcpart partition (ds, hr) select * from srcpart where ds is not null
-
-
-explain analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics
-ABSTRACT SYNTAX TREE:
- (TOK_ANALYZE (TOK_TAB (TOK_TABNAME analyze_srcpart) (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr 11))))
-
-STAGE DEPENDENCIES:
- Stage-0 is a root stage
- Stage-1 depends on stages: Stage-0
-
-STAGE PLANS:
- Stage: Stage-0
- Map Reduce
- Alias -> Map Operator Tree:
- analyze_srcpart
- TableScan
- alias: analyze_srcpart
-
- Stage: Stage-1
- Stats-Aggr Operator
-
-
-
-analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics
-
-describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11)
-key string
-value string
-ds string
-hr string
-
-Detailed Partition Information Partition(values:[2008-04-08, 11], dbName:default, tableName:analyze_srcpart, createTime:1301678572, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/analyze_srcpart/ds=2008-04-08/hr=11, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1301678579, numRows=0, totalSize=5812})
-
-describe extended analyze_srcpart
-key string
-value string
-ds string
-hr string
-
-Detailed Table Information Table(tableName:analyze_srcpart, dbName:default, owner:hudson, createTime:1301678564, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/analyze_srcpart, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{numPartitions=1, numFiles=1, transient_lastDdlTime=1301678579, numRows=0
, totalSize=5812}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
-
-
-explain analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=12) compute statistics
-ABSTRACT SYNTAX TREE:
- (TOK_ANALYZE (TOK_TAB (TOK_TABNAME analyze_srcpart) (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr 12))))
-
-STAGE DEPENDENCIES:
- Stage-0 is a root stage
- Stage-1 depends on stages: Stage-0
-
-STAGE PLANS:
- Stage: Stage-0
- Map Reduce
- Alias -> Map Operator Tree:
- analyze_srcpart
- TableScan
- alias: analyze_srcpart
-
- Stage: Stage-1
- Stats-Aggr Operator
-
-
-
-analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=12) compute statistics
-
-describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
-key string
-value string
-ds string
-hr string
-
-Detailed Partition Information Partition(values:[2008-04-08, 12], dbName:default, tableName:analyze_srcpart, createTime:1301678572, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/analyze_srcpart/ds=2008-04-08/hr=12, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1301678584, numRows=0, totalSize=5812})
-
-
-explain analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=11) compute statistics
-ABSTRACT SYNTAX TREE:
- (TOK_ANALYZE (TOK_TAB (TOK_TABNAME analyze_srcpart) (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-09') (TOK_PARTVAL hr 11))))
-
-STAGE DEPENDENCIES:
- Stage-0 is a root stage
- Stage-1 depends on stages: Stage-0
-
-STAGE PLANS:
- Stage: Stage-0
- Map Reduce
- Alias -> Map Operator Tree:
- analyze_srcpart
- TableScan
- alias: analyze_srcpart
-
- Stage: Stage-1
- Stats-Aggr Operator
-
-
-
-analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=11) compute statistics
-
-describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11)
-key string
-value string
-ds string
-hr string
-
-Detailed Partition Information Partition(values:[2008-04-09, 11], dbName:default, tableName:analyze_srcpart, createTime:1301678572, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/analyze_srcpart/ds=2008-04-09/hr=11, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1301678590, numRows=0, totalSize=5812})
-
-
-explain analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=12) compute statistics
-ABSTRACT SYNTAX TREE:
- (TOK_ANALYZE (TOK_TAB (TOK_TABNAME analyze_srcpart) (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-09') (TOK_PARTVAL hr 12))))
-
-STAGE DEPENDENCIES:
- Stage-0 is a root stage
- Stage-1 depends on stages: Stage-0
-
-STAGE PLANS:
- Stage: Stage-0
- Map Reduce
- Alias -> Map Operator Tree:
- analyze_srcpart
- TableScan
- alias: analyze_srcpart
-
- Stage: Stage-1
- Stats-Aggr Operator
-
-
-
-analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=12) compute statistics
-
-describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12)
-key string
-value string
-ds string
-hr string
-
-Detailed Partition Information Partition(values:[2008-04-09, 12], dbName:default, tableName:analyze_srcpart, createTime:1301678572, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/analyze_srcpart/ds=2008-04-09/hr=12, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1301678595, numRows=0, totalSize=5812})
-
-
-explain analyze table analyze_srcpart PARTITION(ds, hr) compute statistics
-ABSTRACT SYNTAX TREE:
- (TOK_ANALYZE (TOK_TAB (TOK_TABNAME analyze_srcpart) (TOK_PARTSPEC (TOK_PARTVAL ds) (TOK_PARTVAL hr))))
-
-STAGE DEPENDENCIES:
- Stage-0 is a root stage
- Stage-1 depends on stages: Stage-0
-
-STAGE PLANS:
- Stage: Stage-0
- Map Reduce
- Alias -> Map Operator Tree:
- analyze_srcpart
- TableScan
- alias: analyze_srcpart
-
- Stage: Stage-1
- Stats-Aggr Operator
-
-
-
-analyze table analyze_srcpart PARTITION(ds, hr) compute statistics
-
-
-describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11)
-key string
-value string
-ds string
-hr string
-
-Detailed Partition Information Partition(values:[2008-04-08, 11], dbName:default, tableName:analyze_srcpart, createTime:1301678572, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/analyze_srcpart/ds=2008-04-08/hr=11, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1301678602, numRows=0, totalSize=5812})
-
-describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12)
-key string
-value string
-ds string
-hr string
-
-Detailed Partition Information Partition(values:[2008-04-08, 12], dbName:default, tableName:analyze_srcpart, createTime:1301678572, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/analyze_srcpart/ds=2008-04-08/hr=12, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1301678602, numRows=0, totalSize=5812})
-
-describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11)
-key string
-value string
-ds string
-hr string
-
-Detailed Partition Information Partition(values:[2008-04-09, 11], dbName:default, tableName:analyze_srcpart, createTime:1301678572, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/analyze_srcpart/ds=2008-04-09/hr=11, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1301678602, numRows=0, totalSize=5812})
-
-describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12)
-key string
-value string
-ds string
-hr string
-
-Detailed Partition Information Partition(values:[2008-04-09, 12], dbName:default, tableName:analyze_srcpart, createTime:1301678572, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/analyze_srcpart/ds=2008-04-09/hr=12, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1301678602, numRows=0, totalSize=5812})
-
-describe extended analyze_srcpart
-key string
-value string
-ds string
-hr string
-
-Detailed Table Information Table(tableName:analyze_srcpart, dbName:default, owner:hudson, createTime:1301678564, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/analyze_srcpart, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{numPartitions=4, numFiles=4, transient_lastDdlTime=1301678602, numRows=0
, totalSize=23248}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/union3/filter
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/union3/filter b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/union3/filter
index 519dc5c..fb6b844 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/union3/filter
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/union3/filter
@@ -1,2 +1,4 @@
sed -re 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' \
- -e '/^Deleted hdfs/d'
+ -e '/^Deleted hdfs/d' \
+ -e 's#file:/.*/-(ext|mr)-1000#file:/HADOOP/-\1-1000#' \
+ -e '/.*jobconf.xml:an attempt to override final parameter: mapreduce.job.end-notification.*; Ignoring\./ d'
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/union3/out
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/union3/out b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/union3/out
index 1078d08..3b2e136 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/union3/out
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/union3/out
@@ -1,17 +1,3 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
explain
SELECT *
@@ -84,58 +70,62 @@ STAGE PLANS:
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-23-38_342_4585130632229890650/-mr-10002
- Union
- Select Operator
- expressions:
- expr: _col0
- type: int
- outputColumnNames: _col0
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-23-38_342_4585130632229890650/-mr-10003
- Union
- Select Operator
- expressions:
- expr: _col0
- type: int
- outputColumnNames: _col0
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-23-38_342_4585130632229890650/-mr-10005
- Union
- Select Operator
- expressions:
- expr: _col0
- type: int
- outputColumnNames: _col0
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-23-38_342_4585130632229890650/-mr-10007
- Union
- Select Operator
- expressions:
- expr: _col0
- type: int
- outputColumnNames: _col0
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-07-02_444_128701493935379567/-mr-10002
+ TableScan
+ Union
+ Select Operator
+ expressions:
+ expr: _col0
+ type: int
+ outputColumnNames: _col0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-07-02_444_128701493935379567/-mr-10003
+ TableScan
+ Union
+ Select Operator
+ expressions:
+ expr: _col0
+ type: int
+ outputColumnNames: _col0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-07-02_444_128701493935379567/-mr-10005
+ TableScan
+ Union
+ Select Operator
+ expressions:
+ expr: _col0
+ type: int
+ outputColumnNames: _col0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-07-02_444_128701493935379567/-mr-10007
+ TableScan
+ Union
+ Select Operator
+ expressions:
+ expr: _col0
+ type: int
+ outputColumnNames: _col0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Stage: Stage-3
Map Reduce
@@ -214,7 +204,7 @@ STAGE PLANS:
Stage: Stage-5
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-23-38_342_4585130632229890650/-mr-10004
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-07-02_444_128701493935379567/-mr-10004
Reduce Output Operator
key expressions:
expr: _col0
@@ -276,7 +266,7 @@ STAGE PLANS:
Stage: Stage-7
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-23-38_342_4585130632229890650/-mr-10006
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-07-02_444_128701493935379567/-mr-10006
Reduce Output Operator
key expressions:
expr: _col0
@@ -327,7 +317,7 @@ FROM (
SELECT 4 AS id
FROM (SELECT * FROM src LIMIT 1) s2
) a
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/union_out
+Deleted /user/hive/warehouse/union_out
select * from union_out cluster by id
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/uniquejoin/filter
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/uniquejoin/filter b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/uniquejoin/filter
index 3fd69ab..74e5970 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/uniquejoin/filter
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/uniquejoin/filter
@@ -1,3 +1,4 @@
sed -re 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' \
-e 's#Copying file:.*/T..txt#Copying file:TX.txt#' \
- -e '/^Deleted hdfs:/d'
+ -e '/^Deleted hdfs:/d' \
+ -e '/.*jobconf.xml:an attempt to override final parameter: mapreduce.job.end-notification.*; Ignoring\./ d'
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/uniquejoin/out
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/uniquejoin/out b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/uniquejoin/out
index 040f6dc..9cfdbbb 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/uniquejoin/out
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/uniquejoin/out
@@ -1,17 +1,3 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE
CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE
@@ -20,13 +6,13 @@ CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE
LOAD DATA LOCAL INPATH 'seed_data_files/T1.txt' INTO TABLE T1
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster/examples/hive/target/seed_data_files/T1.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/T1.txt
LOAD DATA LOCAL INPATH 'seed_data_files/T2.txt' INTO TABLE T2
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster/examples/hive/target/seed_data_files/T2.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/T2.txt
LOAD DATA LOCAL INPATH 'seed_data_files/T3.txt' INTO TABLE T3
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster/examples/hive/target/seed_data_files/T3.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/T3.txt
FROM UNIQUEJOIN PRESERVE T1 a (a.key), PRESERVE T2 b (b.key), PRESERVE T3 c (c.key)
[2/5] BIGTOP-885. TestHiveSmokeBulk fails on Hive 0.9
Posted by co...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/multi_insert/out
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/multi_insert/out b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/multi_insert/out
index e31cb88..3691408 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/multi_insert/out
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/multi_insert/out
@@ -1,17 +1,3 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
create table src_multi1 like src
@@ -44,7 +30,7 @@ STAGE PLANS:
alias: src
Filter Operator
predicate:
- expr: (key < 10)
+ expr: (key < 10.0)
type: boolean
Select Operator
expressions:
@@ -63,7 +49,7 @@ STAGE PLANS:
name: default.src_multi1
Filter Operator
predicate:
- expr: ((key > 10) and (key < 20))
+ expr: ((key > 10.0) and (key < 20.0))
type: boolean
Select Operator
expressions:
@@ -113,8 +99,8 @@ STAGE PLANS:
from src
insert overwrite table src_multi1 select * where key < 10
insert overwrite table src_multi2 select * where key > 10 and key < 20
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi1
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi2
+Deleted /user/hive/warehouse/src_multi1
+Deleted /user/hive/warehouse/src_multi2
select * from src_multi1 order by key, value
@@ -152,16 +138,20 @@ ABSTRACT SYNTAX TREE:
STAGE DEPENDENCIES:
Stage-2 is a root stage
- Stage-6 depends on stages: Stage-2 , consists of Stage-5, Stage-4
+ Stage-8 depends on stages: Stage-2 , consists of Stage-5, Stage-4, Stage-6
Stage-5
- Stage-0 depends on stages: Stage-5, Stage-4
+ Stage-0 depends on stages: Stage-5, Stage-4, Stage-7
Stage-3 depends on stages: Stage-0
Stage-4
- Stage-10 depends on stages: Stage-2 , consists of Stage-9, Stage-8
- Stage-9
- Stage-1 depends on stages: Stage-9, Stage-8
- Stage-7 depends on stages: Stage-1
- Stage-8
+ Stage-6
+ Stage-7 depends on stages: Stage-6
+ Stage-14 depends on stages: Stage-2 , consists of Stage-11, Stage-10, Stage-12
+ Stage-11
+ Stage-1 depends on stages: Stage-11, Stage-10, Stage-13
+ Stage-9 depends on stages: Stage-1
+ Stage-10
+ Stage-12
+ Stage-13 depends on stages: Stage-12
STAGE PLANS:
Stage: Stage-2
@@ -172,7 +162,7 @@ STAGE PLANS:
alias: src
Filter Operator
predicate:
- expr: (key < 10)
+ expr: (key < 10.0)
type: boolean
Select Operator
expressions:
@@ -191,7 +181,7 @@ STAGE PLANS:
name: default.src_multi1
Filter Operator
predicate:
- expr: ((key > 10) and (key < 20))
+ expr: ((key > 10.0) and (key < 20.0))
type: boolean
Select Operator
expressions:
@@ -209,14 +199,14 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi2
- Stage: Stage-6
+ Stage: Stage-8
Conditional Operator
Stage: Stage-5
Move Operator
files:
hdfs directory: true
- destination: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_09-56-01_262_2997868027895702972/-ext-10000
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-46-51_613_5391391039842163435/-ext-10000
Stage: Stage-0
Move Operator
@@ -234,7 +224,7 @@ STAGE PLANS:
Stage: Stage-4
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_09-56-01_262_2997868027895702972/-ext-10004
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-46-51_613_5391391039842163435/-ext-10004
File Output Operator
compressed: false
GlobalTableId: 0
@@ -244,14 +234,33 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi1
- Stage: Stage-10
+ Stage: Stage-6
+ Map Reduce
+ Alias -> Map Operator Tree:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-46-51_613_5391391039842163435/-ext-10004
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi1
+
+ Stage: Stage-7
+ Move Operator
+ files:
+ hdfs directory: true
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-46-51_613_5391391039842163435/-ext-10000
+
+ Stage: Stage-14
Conditional Operator
- Stage: Stage-9
+ Stage: Stage-11
Move Operator
files:
hdfs directory: true
- destination: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_09-56-01_262_2997868027895702972/-ext-10002
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-46-51_613_5391391039842163435/-ext-10002
Stage: Stage-1
Move Operator
@@ -263,13 +272,26 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi2
- Stage: Stage-7
+ Stage: Stage-9
Stats-Aggr Operator
- Stage: Stage-8
+ Stage: Stage-10
+ Map Reduce
+ Alias -> Map Operator Tree:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-46-51_613_5391391039842163435/-ext-10005
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi2
+
+ Stage: Stage-12
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_09-56-01_262_2997868027895702972/-ext-10005
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-46-51_613_5391391039842163435/-ext-10005
File Output Operator
compressed: false
GlobalTableId: 0
@@ -279,14 +301,20 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi2
+ Stage: Stage-13
+ Move Operator
+ files:
+ hdfs directory: true
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-46-51_613_5391391039842163435/-ext-10002
+
from src
insert overwrite table src_multi1 select * where key < 10
insert overwrite table src_multi2 select * where key > 10 and key < 20
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi1
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi2
+Deleted /user/hive/warehouse/src_multi1
+Deleted /user/hive/warehouse/src_multi2
select * from src_multi1 order by key, value
@@ -338,7 +366,7 @@ STAGE PLANS:
alias: src
Filter Operator
predicate:
- expr: (key < 10)
+ expr: (key < 10.0)
type: boolean
Select Operator
expressions:
@@ -357,7 +385,7 @@ STAGE PLANS:
name: default.src_multi1
Filter Operator
predicate:
- expr: ((key > 10) and (key < 20))
+ expr: ((key > 10.0) and (key < 20.0))
type: boolean
Select Operator
expressions:
@@ -407,8 +435,8 @@ STAGE PLANS:
from src
insert overwrite table src_multi1 select * where key < 10
insert overwrite table src_multi2 select * where key > 10 and key < 20
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi1
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi2
+Deleted /user/hive/warehouse/src_multi1
+Deleted /user/hive/warehouse/src_multi2
select * from src_multi1 order by key, value
@@ -446,16 +474,20 @@ ABSTRACT SYNTAX TREE:
STAGE DEPENDENCIES:
Stage-2 is a root stage
- Stage-6 depends on stages: Stage-2 , consists of Stage-5, Stage-4
+ Stage-8 depends on stages: Stage-2 , consists of Stage-5, Stage-4, Stage-6
Stage-5
- Stage-0 depends on stages: Stage-5, Stage-4
+ Stage-0 depends on stages: Stage-5, Stage-4, Stage-7
Stage-3 depends on stages: Stage-0
Stage-4
- Stage-10 depends on stages: Stage-2 , consists of Stage-9, Stage-8
- Stage-9
- Stage-1 depends on stages: Stage-9, Stage-8
- Stage-7 depends on stages: Stage-1
- Stage-8
+ Stage-6
+ Stage-7 depends on stages: Stage-6
+ Stage-14 depends on stages: Stage-2 , consists of Stage-11, Stage-10, Stage-12
+ Stage-11
+ Stage-1 depends on stages: Stage-11, Stage-10, Stage-13
+ Stage-9 depends on stages: Stage-1
+ Stage-10
+ Stage-12
+ Stage-13 depends on stages: Stage-12
STAGE PLANS:
Stage: Stage-2
@@ -466,7 +498,7 @@ STAGE PLANS:
alias: src
Filter Operator
predicate:
- expr: (key < 10)
+ expr: (key < 10.0)
type: boolean
Select Operator
expressions:
@@ -485,7 +517,7 @@ STAGE PLANS:
name: default.src_multi1
Filter Operator
predicate:
- expr: ((key > 10) and (key < 20))
+ expr: ((key > 10.0) and (key < 20.0))
type: boolean
Select Operator
expressions:
@@ -503,14 +535,14 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi2
- Stage: Stage-6
+ Stage: Stage-8
Conditional Operator
Stage: Stage-5
Move Operator
files:
hdfs directory: true
- destination: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_09-57-18_882_6090120587986446782/-ext-10000
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-49-02_277_2523279370671085980/-ext-10000
Stage: Stage-0
Move Operator
@@ -528,7 +560,7 @@ STAGE PLANS:
Stage: Stage-4
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_09-57-18_882_6090120587986446782/-ext-10004
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-49-02_277_2523279370671085980/-ext-10004
File Output Operator
compressed: false
GlobalTableId: 0
@@ -538,14 +570,33 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi1
- Stage: Stage-10
+ Stage: Stage-6
+ Map Reduce
+ Alias -> Map Operator Tree:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-49-02_277_2523279370671085980/-ext-10004
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi1
+
+ Stage: Stage-7
+ Move Operator
+ files:
+ hdfs directory: true
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-49-02_277_2523279370671085980/-ext-10000
+
+ Stage: Stage-14
Conditional Operator
- Stage: Stage-9
+ Stage: Stage-11
Move Operator
files:
hdfs directory: true
- destination: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_09-57-18_882_6090120587986446782/-ext-10002
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-49-02_277_2523279370671085980/-ext-10002
Stage: Stage-1
Move Operator
@@ -557,13 +608,26 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi2
- Stage: Stage-7
+ Stage: Stage-9
Stats-Aggr Operator
- Stage: Stage-8
+ Stage: Stage-10
+ Map Reduce
+ Alias -> Map Operator Tree:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-49-02_277_2523279370671085980/-ext-10005
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi2
+
+ Stage: Stage-12
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_09-57-18_882_6090120587986446782/-ext-10005
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-49-02_277_2523279370671085980/-ext-10005
File Output Operator
compressed: false
GlobalTableId: 0
@@ -573,14 +637,20 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi2
+ Stage: Stage-13
+ Move Operator
+ files:
+ hdfs directory: true
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-49-02_277_2523279370671085980/-ext-10002
+
from src
insert overwrite table src_multi1 select * where key < 10
insert overwrite table src_multi2 select * where key > 10 and key < 20
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi1
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi2
+Deleted /user/hive/warehouse/src_multi2
+Deleted /user/hive/warehouse/src_multi1
select * from src_multi1 order by key, value
@@ -620,9 +690,8 @@ STAGE DEPENDENCIES:
Stage-2 is a root stage
Stage-0 depends on stages: Stage-2
Stage-3 depends on stages: Stage-0
- Stage-4 depends on stages: Stage-2
- Stage-1 depends on stages: Stage-4
- Stage-5 depends on stages: Stage-1
+ Stage-1 depends on stages: Stage-2
+ Stage-4 depends on stages: Stage-1
STAGE PLANS:
Stage: Stage-2
@@ -633,7 +702,7 @@ STAGE PLANS:
alias: src
Filter Operator
predicate:
- expr: (key < 10)
+ expr: ((key < 10.0) or ((key > 10.0) and (key < 20.0)))
type: boolean
Select Operator
expressions:
@@ -642,79 +711,69 @@ STAGE PLANS:
expr: value
type: string
outputColumnNames: key, value
- Group By Operator
- bucketGroup: false
- keys:
+ Reduce Output Operator
+ key expressions:
expr: key
type: string
expr: value
type: string
- mode: hash
- outputColumnNames: _col0, _col1
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- sort order: ++
- Map-reduce partition columns:
- expr: _col0
- type: string
- expr: _col1
- type: string
- tag: -1
- Filter Operator
- predicate:
- expr: ((key > 10) and (key < 20))
- type: boolean
- Select Operator
- expressions:
- expr: key
- type: string
- expr: value
- type: string
- outputColumnNames: key, value
- Group By Operator
- bucketGroup: false
- keys:
+ sort order: ++
+ Map-reduce partition columns:
expr: key
type: string
expr: value
type: string
- mode: hash
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ tag: -1
Reduce Operator Tree:
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: mergepartial
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Forward
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
type: string
- expr: _col1
+ expr: KEY._col1
type: string
+ mode: complete
outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi1
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi1
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
+ type: string
+ expr: KEY._col1
+ type: string
+ mode: complete
+ outputColumnNames: _col0, _col1
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 2
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi2
Stage: Stage-0
Move Operator
@@ -729,49 +788,6 @@ STAGE PLANS:
Stage: Stage-3
Stats-Aggr Operator
- Stage: Stage-4
- Map Reduce
- Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_09-58-01_178_8575415668218707873/-mr-10004
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- sort order: ++
- Map-reduce partition columns:
- expr: _col0
- type: string
- expr: _col1
- type: string
- tag: -1
- Reduce Operator Tree:
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: mergepartial
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 2
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi2
-
Stage: Stage-1
Move Operator
tables:
@@ -782,7 +798,7 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi2
- Stage: Stage-5
+ Stage: Stage-4
Stats-Aggr Operator
@@ -791,12 +807,18 @@ STAGE PLANS:
from src
insert overwrite table src_multi1 select * where key < 10 group by key, value
insert overwrite table src_multi2 select * where key > 10 and key < 20 group by key, value
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi1
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi2
+Deleted /user/hive/warehouse/src_multi1
+Deleted /user/hive/warehouse/src_multi2
select * from src_multi1 order by key, value
0 val_0
+11 val_11
+12 val_12
+15 val_15
+17 val_17
+18 val_18
+19 val_19
2 val_2
4 val_4
5 val_5
@@ -804,12 +826,18 @@ select * from src_multi1 order by key, value
9 val_9
select * from src_multi2 order by key, value
+0 val_0
11 val_11
12 val_12
15 val_15
17 val_17
18 val_18
19 val_19
+2 val_2
+4 val_4
+5 val_5
+8 val_8
+9 val_9
set hive.merge.mapfiles=false
set hive.merge.mapredfiles=true
@@ -823,17 +851,20 @@ ABSTRACT SYNTAX TREE:
STAGE DEPENDENCIES:
Stage-2 is a root stage
- Stage-6 depends on stages: Stage-2 , consists of Stage-5, Stage-4
+ Stage-8 depends on stages: Stage-2 , consists of Stage-5, Stage-4, Stage-6
Stage-5
- Stage-0 depends on stages: Stage-5, Stage-4
+ Stage-0 depends on stages: Stage-5, Stage-4, Stage-7
Stage-3 depends on stages: Stage-0
Stage-4
- Stage-7 depends on stages: Stage-2
- Stage-11 depends on stages: Stage-7 , consists of Stage-10, Stage-9
+ Stage-6
+ Stage-7 depends on stages: Stage-6
+ Stage-14 depends on stages: Stage-2 , consists of Stage-11, Stage-10, Stage-12
+ Stage-11
+ Stage-1 depends on stages: Stage-11, Stage-10, Stage-13
+ Stage-9 depends on stages: Stage-1
Stage-10
- Stage-1 depends on stages: Stage-10, Stage-9
- Stage-8 depends on stages: Stage-1
- Stage-9
+ Stage-12
+ Stage-13 depends on stages: Stage-12
STAGE PLANS:
Stage: Stage-2
@@ -844,7 +875,7 @@ STAGE PLANS:
alias: src
Filter Operator
predicate:
- expr: (key < 10)
+ expr: ((key < 10.0) or ((key > 10.0) and (key < 20.0)))
type: boolean
Select Operator
expressions:
@@ -853,88 +884,78 @@ STAGE PLANS:
expr: value
type: string
outputColumnNames: key, value
- Group By Operator
- bucketGroup: false
- keys:
+ Reduce Output Operator
+ key expressions:
expr: key
type: string
expr: value
type: string
- mode: hash
- outputColumnNames: _col0, _col1
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- sort order: ++
- Map-reduce partition columns:
- expr: _col0
- type: string
- expr: _col1
- type: string
- tag: -1
- Filter Operator
- predicate:
- expr: ((key > 10) and (key < 20))
- type: boolean
- Select Operator
- expressions:
- expr: key
- type: string
- expr: value
- type: string
- outputColumnNames: key, value
- Group By Operator
- bucketGroup: false
- keys:
+ sort order: ++
+ Map-reduce partition columns:
expr: key
type: string
expr: value
type: string
- mode: hash
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ tag: -1
Reduce Operator Tree:
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: mergepartial
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Forward
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
type: string
- expr: _col1
+ expr: KEY._col1
type: string
+ mode: complete
outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi1
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi1
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
+ type: string
+ expr: KEY._col1
+ type: string
+ mode: complete
+ outputColumnNames: _col0, _col1
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 2
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi2
- Stage: Stage-6
+ Stage: Stage-8
Conditional Operator
Stage: Stage-5
Move Operator
files:
hdfs directory: true
- destination: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_09-58-55_556_2585572979058761497/-ext-10000
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-51-13_762_9056111985287922984/-ext-10000
Stage: Stage-0
Move Operator
@@ -952,7 +973,7 @@ STAGE PLANS:
Stage: Stage-4
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_09-58-55_556_2585572979058761497/-ext-10004
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-51-13_762_9056111985287922984/-ext-10004
File Output Operator
compressed: false
GlobalTableId: 0
@@ -962,57 +983,33 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi1
- Stage: Stage-7
+ Stage: Stage-6
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_09-58-55_556_2585572979058761497/-mr-10005
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- sort order: ++
- Map-reduce partition columns:
- expr: _col0
- type: string
- expr: _col1
- type: string
- tag: -1
- Reduce Operator Tree:
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: mergepartial
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- outputColumnNames: _col0, _col1
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-51-13_762_9056111985287922984/-ext-10004
File Output Operator
compressed: false
- GlobalTableId: 2
+ GlobalTableId: 0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi2
+ name: default.src_multi1
- Stage: Stage-11
+ Stage: Stage-7
+ Move Operator
+ files:
+ hdfs directory: true
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-51-13_762_9056111985287922984/-ext-10000
+
+ Stage: Stage-14
Conditional Operator
- Stage: Stage-10
+ Stage: Stage-11
Move Operator
files:
hdfs directory: true
- destination: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_09-58-55_556_2585572979058761497/-ext-10002
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-51-13_762_9056111985287922984/-ext-10002
Stage: Stage-1
Move Operator
@@ -1024,13 +1021,13 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi2
- Stage: Stage-8
+ Stage: Stage-9
Stats-Aggr Operator
- Stage: Stage-9
+ Stage: Stage-10
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_09-58-55_556_2585572979058761497/-ext-10006
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-51-13_762_9056111985287922984/-ext-10005
File Output Operator
compressed: false
GlobalTableId: 0
@@ -1040,18 +1037,43 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi2
+ Stage: Stage-12
+ Map Reduce
+ Alias -> Map Operator Tree:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-51-13_762_9056111985287922984/-ext-10005
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi2
+
+ Stage: Stage-13
+ Move Operator
+ files:
+ hdfs directory: true
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-51-13_762_9056111985287922984/-ext-10002
+
from src
insert overwrite table src_multi1 select * where key < 10 group by key, value
insert overwrite table src_multi2 select * where key > 10 and key < 20 group by key, value
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi2
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi1
+Deleted /user/hive/warehouse/src_multi1
+Deleted /user/hive/warehouse/src_multi2
select * from src_multi1 order by key, value
0 val_0
+11 val_11
+12 val_12
+15 val_15
+17 val_17
+18 val_18
+19 val_19
2 val_2
4 val_4
5 val_5
@@ -1059,12 +1081,18 @@ select * from src_multi1 order by key, value
9 val_9
select * from src_multi2 order by key, value
+0 val_0
11 val_11
12 val_12
15 val_15
17 val_17
18 val_18
19 val_19
+2 val_2
+4 val_4
+5 val_5
+8 val_8
+9 val_9
set hive.merge.mapfiles=true
set hive.merge.mapredfiles=false
@@ -1080,53 +1108,19 @@ STAGE DEPENDENCIES:
Stage-2 is a root stage
Stage-0 depends on stages: Stage-2
Stage-3 depends on stages: Stage-0
- Stage-4 depends on stages: Stage-2
- Stage-1 depends on stages: Stage-4
- Stage-5 depends on stages: Stage-1
+ Stage-1 depends on stages: Stage-2
+ Stage-4 depends on stages: Stage-1
-STAGE PLANS:
- Stage: Stage-2
- Map Reduce
- Alias -> Map Operator Tree:
- src
- TableScan
- alias: src
- Filter Operator
- predicate:
- expr: (key < 10)
- type: boolean
- Select Operator
- expressions:
- expr: key
- type: string
- expr: value
- type: string
- outputColumnNames: key, value
- Group By Operator
- bucketGroup: false
- keys:
- expr: key
- type: string
- expr: value
- type: string
- mode: hash
- outputColumnNames: _col0, _col1
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- sort order: ++
- Map-reduce partition columns:
- expr: _col0
- type: string
- expr: _col1
- type: string
- tag: -1
+STAGE PLANS:
+ Stage: Stage-2
+ Map Reduce
+ Alias -> Map Operator Tree:
+ src
+ TableScan
+ alias: src
Filter Operator
predicate:
- expr: ((key > 10) and (key < 20))
+ expr: ((key < 10.0) or ((key > 10.0) and (key < 20.0)))
type: boolean
Select Operator
expressions:
@@ -1135,46 +1129,69 @@ STAGE PLANS:
expr: value
type: string
outputColumnNames: key, value
- Group By Operator
- bucketGroup: false
- keys:
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ sort order: ++
+ Map-reduce partition columns:
expr: key
type: string
expr: value
type: string
- mode: hash
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ tag: -1
Reduce Operator Tree:
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: mergepartial
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Forward
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
type: string
- expr: _col1
+ expr: KEY._col1
type: string
+ mode: complete
outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi1
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi1
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
+ type: string
+ expr: KEY._col1
+ type: string
+ mode: complete
+ outputColumnNames: _col0, _col1
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 2
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi2
Stage: Stage-0
Move Operator
@@ -1189,49 +1206,6 @@ STAGE PLANS:
Stage: Stage-3
Stats-Aggr Operator
- Stage: Stage-4
- Map Reduce
- Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_09-59-48_593_2187818685318740583/-mr-10004
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- sort order: ++
- Map-reduce partition columns:
- expr: _col0
- type: string
- expr: _col1
- type: string
- tag: -1
- Reduce Operator Tree:
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: mergepartial
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 2
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi2
-
Stage: Stage-1
Move Operator
tables:
@@ -1242,7 +1216,7 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi2
- Stage: Stage-5
+ Stage: Stage-4
Stats-Aggr Operator
@@ -1251,12 +1225,18 @@ STAGE PLANS:
from src
insert overwrite table src_multi1 select * where key < 10 group by key, value
insert overwrite table src_multi2 select * where key > 10 and key < 20 group by key, value
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi1
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi2
+Deleted /user/hive/warehouse/src_multi1
+Deleted /user/hive/warehouse/src_multi2
select * from src_multi1 order by key, value
0 val_0
+11 val_11
+12 val_12
+15 val_15
+17 val_17
+18 val_18
+19 val_19
2 val_2
4 val_4
5 val_5
@@ -1264,12 +1244,18 @@ select * from src_multi1 order by key, value
9 val_9
select * from src_multi2 order by key, value
+0 val_0
11 val_11
12 val_12
15 val_15
17 val_17
18 val_18
19 val_19
+2 val_2
+4 val_4
+5 val_5
+8 val_8
+9 val_9
set hive.merge.mapfiles=true
set hive.merge.mapredfiles=true
@@ -1283,17 +1269,20 @@ ABSTRACT SYNTAX TREE:
STAGE DEPENDENCIES:
Stage-2 is a root stage
- Stage-6 depends on stages: Stage-2 , consists of Stage-5, Stage-4
+ Stage-8 depends on stages: Stage-2 , consists of Stage-5, Stage-4, Stage-6
Stage-5
- Stage-0 depends on stages: Stage-5, Stage-4
+ Stage-0 depends on stages: Stage-5, Stage-4, Stage-7
Stage-3 depends on stages: Stage-0
Stage-4
- Stage-7 depends on stages: Stage-2
- Stage-11 depends on stages: Stage-7 , consists of Stage-10, Stage-9
+ Stage-6
+ Stage-7 depends on stages: Stage-6
+ Stage-14 depends on stages: Stage-2 , consists of Stage-11, Stage-10, Stage-12
+ Stage-11
+ Stage-1 depends on stages: Stage-11, Stage-10, Stage-13
+ Stage-9 depends on stages: Stage-1
Stage-10
- Stage-1 depends on stages: Stage-10, Stage-9
- Stage-8 depends on stages: Stage-1
- Stage-9
+ Stage-12
+ Stage-13 depends on stages: Stage-12
STAGE PLANS:
Stage: Stage-2
@@ -1304,7 +1293,7 @@ STAGE PLANS:
alias: src
Filter Operator
predicate:
- expr: (key < 10)
+ expr: ((key < 10.0) or ((key > 10.0) and (key < 20.0)))
type: boolean
Select Operator
expressions:
@@ -1313,88 +1302,78 @@ STAGE PLANS:
expr: value
type: string
outputColumnNames: key, value
- Group By Operator
- bucketGroup: false
- keys:
+ Reduce Output Operator
+ key expressions:
expr: key
type: string
expr: value
type: string
- mode: hash
- outputColumnNames: _col0, _col1
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- sort order: ++
- Map-reduce partition columns:
- expr: _col0
- type: string
- expr: _col1
- type: string
- tag: -1
- Filter Operator
- predicate:
- expr: ((key > 10) and (key < 20))
- type: boolean
- Select Operator
- expressions:
- expr: key
- type: string
- expr: value
- type: string
- outputColumnNames: key, value
- Group By Operator
- bucketGroup: false
- keys:
+ sort order: ++
+ Map-reduce partition columns:
expr: key
type: string
expr: value
type: string
- mode: hash
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ tag: -1
Reduce Operator Tree:
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: mergepartial
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Forward
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
type: string
- expr: _col1
+ expr: KEY._col1
type: string
+ mode: complete
outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi1
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi1
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
+ type: string
+ expr: KEY._col1
+ type: string
+ mode: complete
+ outputColumnNames: _col0, _col1
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 2
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi2
- Stage: Stage-6
+ Stage: Stage-8
Conditional Operator
Stage: Stage-5
Move Operator
files:
hdfs directory: true
- destination: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-00-39_774_4695415401007684016/-ext-10000
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-53-37_756_7049471615260796328/-ext-10000
Stage: Stage-0
Move Operator
@@ -1412,7 +1391,7 @@ STAGE PLANS:
Stage: Stage-4
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-00-39_774_4695415401007684016/-ext-10004
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-53-37_756_7049471615260796328/-ext-10004
File Output Operator
compressed: false
GlobalTableId: 0
@@ -1422,57 +1401,33 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi1
- Stage: Stage-7
+ Stage: Stage-6
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-00-39_774_4695415401007684016/-mr-10005
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- sort order: ++
- Map-reduce partition columns:
- expr: _col0
- type: string
- expr: _col1
- type: string
- tag: -1
- Reduce Operator Tree:
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: mergepartial
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- outputColumnNames: _col0, _col1
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-53-37_756_7049471615260796328/-ext-10004
File Output Operator
compressed: false
- GlobalTableId: 2
+ GlobalTableId: 0
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi2
+ name: default.src_multi1
- Stage: Stage-11
+ Stage: Stage-7
+ Move Operator
+ files:
+ hdfs directory: true
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-53-37_756_7049471615260796328/-ext-10000
+
+ Stage: Stage-14
Conditional Operator
- Stage: Stage-10
+ Stage: Stage-11
Move Operator
files:
hdfs directory: true
- destination: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-00-39_774_4695415401007684016/-ext-10002
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-53-37_756_7049471615260796328/-ext-10002
Stage: Stage-1
Move Operator
@@ -1484,13 +1439,26 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi2
- Stage: Stage-8
+ Stage: Stage-9
Stats-Aggr Operator
- Stage: Stage-9
+ Stage: Stage-10
+ Map Reduce
+ Alias -> Map Operator Tree:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-53-37_756_7049471615260796328/-ext-10005
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi2
+
+ Stage: Stage-12
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-00-39_774_4695415401007684016/-ext-10006
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-53-37_756_7049471615260796328/-ext-10005
File Output Operator
compressed: false
GlobalTableId: 0
@@ -1500,18 +1468,30 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi2
+ Stage: Stage-13
+ Move Operator
+ files:
+ hdfs directory: true
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-53-37_756_7049471615260796328/-ext-10002
+
from src
insert overwrite table src_multi1 select * where key < 10 group by key, value
insert overwrite table src_multi2 select * where key > 10 and key < 20 group by key, value
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi1
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi2
+Deleted /user/hive/warehouse/src_multi1
+Deleted /user/hive/warehouse/src_multi2
select * from src_multi1 order by key, value
0 val_0
+11 val_11
+12 val_12
+15 val_15
+17 val_17
+18 val_18
+19 val_19
2 val_2
4 val_4
5 val_5
@@ -1519,12 +1499,18 @@ select * from src_multi1 order by key, value
9 val_9
select * from src_multi2 order by key, value
+0 val_0
11 val_11
12 val_12
15 val_15
17 val_17
18 val_18
19 val_19
+2 val_2
+4 val_4
+5 val_5
+8 val_8
+9 val_9
set hive.merge.mapfiles=false
set hive.merge.mapredfiles=false
@@ -1560,7 +1546,7 @@ STAGE PLANS:
Union
Filter Operator
predicate:
- expr: (_col0 < 10)
+ expr: (_col0 < 10.0)
type: boolean
Select Operator
expressions:
@@ -1579,7 +1565,7 @@ STAGE PLANS:
name: default.src_multi1
Filter Operator
predicate:
- expr: ((_col0 > 10) and (_col0 < 20))
+ expr: ((_col0 > 10.0) and (_col0 < 20.0))
type: boolean
Select Operator
expressions:
@@ -1609,7 +1595,7 @@ STAGE PLANS:
Union
Filter Operator
predicate:
- expr: (_col0 < 10)
+ expr: (_col0 < 10.0)
type: boolean
Select Operator
expressions:
@@ -1628,7 +1614,7 @@ STAGE PLANS:
name: default.src_multi1
Filter Operator
predicate:
- expr: ((_col0 > 10) and (_col0 < 20))
+ expr: ((_col0 > 10.0) and (_col0 < 20.0))
type: boolean
Select Operator
expressions:
@@ -1678,8 +1664,8 @@ STAGE PLANS:
from (select * from src union all select * from src) s
insert overwrite table src_multi1 select * where key < 10
insert overwrite table src_multi2 select * where key > 10 and key < 20
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi1
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi2
+Deleted /user/hive/warehouse/src_multi1
+Deleted /user/hive/warehouse/src_multi2
select * from src_multi1 order by key, value
@@ -1736,16 +1722,20 @@ ABSTRACT SYNTAX TREE:
STAGE DEPENDENCIES:
Stage-2 is a root stage
- Stage-6 depends on stages: Stage-2 , consists of Stage-5, Stage-4
+ Stage-8 depends on stages: Stage-2 , consists of Stage-5, Stage-4, Stage-6
Stage-5
- Stage-0 depends on stages: Stage-5, Stage-4
+ Stage-0 depends on stages: Stage-5, Stage-4, Stage-7
Stage-3 depends on stages: Stage-0
Stage-4
- Stage-10 depends on stages: Stage-2 , consists of Stage-9, Stage-8
- Stage-9
- Stage-1 depends on stages: Stage-9, Stage-8
- Stage-7 depends on stages: Stage-1
- Stage-8
+ Stage-6
+ Stage-7 depends on stages: Stage-6
+ Stage-14 depends on stages: Stage-2 , consists of Stage-11, Stage-10, Stage-12
+ Stage-11
+ Stage-1 depends on stages: Stage-11, Stage-10, Stage-13
+ Stage-9 depends on stages: Stage-1
+ Stage-10
+ Stage-12
+ Stage-13 depends on stages: Stage-12
STAGE PLANS:
Stage: Stage-2
@@ -1764,7 +1754,7 @@ STAGE PLANS:
Union
Filter Operator
predicate:
- expr: (_col0 < 10)
+ expr: (_col0 < 10.0)
type: boolean
Select Operator
expressions:
@@ -1783,7 +1773,7 @@ STAGE PLANS:
name: default.src_multi1
Filter Operator
predicate:
- expr: ((_col0 > 10) and (_col0 < 20))
+ expr: ((_col0 > 10.0) and (_col0 < 20.0))
type: boolean
Select Operator
expressions:
@@ -1813,7 +1803,7 @@ STAGE PLANS:
Union
Filter Operator
predicate:
- expr: (_col0 < 10)
+ expr: (_col0 < 10.0)
type: boolean
Select Operator
expressions:
@@ -1832,7 +1822,7 @@ STAGE PLANS:
name: default.src_multi1
Filter Operator
predicate:
- expr: ((_col0 > 10) and (_col0 < 20))
+ expr: ((_col0 > 10.0) and (_col0 < 20.0))
type: boolean
Select Operator
expressions:
@@ -1850,14 +1840,14 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi2
- Stage: Stage-6
+ Stage: Stage-8
Conditional Operator
Stage: Stage-5
Move Operator
files:
hdfs directory: true
- destination: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-02-02_968_1192959228674852603/-ext-10000
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-55-55_675_1061131400972282069/-ext-10000
Stage: Stage-0
Move Operator
@@ -1875,7 +1865,7 @@ STAGE PLANS:
Stage: Stage-4
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-02-02_968_1192959228674852603/-ext-10004
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-55-55_675_1061131400972282069/-ext-10004
File Output Operator
compressed: false
GlobalTableId: 0
@@ -1885,14 +1875,33 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi1
- Stage: Stage-10
+ Stage: Stage-6
+ Map Reduce
+ Alias -> Map Operator Tree:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-55-55_675_1061131400972282069/-ext-10004
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi1
+
+ Stage: Stage-7
+ Move Operator
+ files:
+ hdfs directory: true
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-55-55_675_1061131400972282069/-ext-10000
+
+ Stage: Stage-14
Conditional Operator
- Stage: Stage-9
+ Stage: Stage-11
Move Operator
files:
hdfs directory: true
- destination: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-02-02_968_1192959228674852603/-ext-10002
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-55-55_675_1061131400972282069/-ext-10002
Stage: Stage-1
Move Operator
@@ -1904,13 +1913,26 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi2
- Stage: Stage-7
+ Stage: Stage-9
Stats-Aggr Operator
- Stage: Stage-8
+ Stage: Stage-10
+ Map Reduce
+ Alias -> Map Operator Tree:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-55-55_675_1061131400972282069/-ext-10005
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi2
+
+ Stage: Stage-12
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-02-02_968_1192959228674852603/-ext-10005
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-55-55_675_1061131400972282069/-ext-10005
File Output Operator
compressed: false
GlobalTableId: 0
@@ -1920,14 +1942,20 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi2
+ Stage: Stage-13
+ Move Operator
+ files:
+ hdfs directory: true
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-55-55_675_1061131400972282069/-ext-10002
+
from (select * from src union all select * from src) s
insert overwrite table src_multi1 select * where key < 10
insert overwrite table src_multi2 select * where key > 10 and key < 20
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi1
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi2
+Deleted /user/hive/warehouse/src_multi1
+Deleted /user/hive/warehouse/src_multi2
select * from src_multi1 order by key, value
@@ -2006,7 +2034,7 @@ STAGE PLANS:
Union
Filter Operator
predicate:
- expr: (_col0 < 10)
+ expr: (_col0 < 10.0)
type: boolean
Select Operator
expressions:
@@ -2025,7 +2053,7 @@ STAGE PLANS:
name: default.src_multi1
Filter Operator
predicate:
- expr: ((_col0 > 10) and (_col0 < 20))
+ expr: ((_col0 > 10.0) and (_col0 < 20.0))
type: boolean
Select Operator
expressions:
@@ -2055,7 +2083,7 @@ STAGE PLANS:
Union
Filter Operator
predicate:
- expr: (_col0 < 10)
+ expr: (_col0 < 10.0)
type: boolean
Select Operator
expressions:
@@ -2074,7 +2102,7 @@ STAGE PLANS:
name: default.src_multi1
Filter Operator
predicate:
- expr: ((_col0 > 10) and (_col0 < 20))
+ expr: ((_col0 > 10.0) and (_col0 < 20.0))
type: boolean
Select Operator
expressions:
@@ -2124,8 +2152,8 @@ STAGE PLANS:
from (select * from src union all select * from src) s
insert overwrite table src_multi1 select * where key < 10
insert overwrite table src_multi2 select * where key > 10 and key < 20
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi1
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi2
+Deleted /user/hive/warehouse/src_multi1
+Deleted /user/hive/warehouse/src_multi2
select * from src_multi1 order by key, value
@@ -2182,16 +2210,20 @@ ABSTRACT SYNTAX TREE:
STAGE DEPENDENCIES:
Stage-2 is a root stage
- Stage-6 depends on stages: Stage-2 , consists of Stage-5, Stage-4
+ Stage-8 depends on stages: Stage-2 , consists of Stage-5, Stage-4, Stage-6
Stage-5
- Stage-0 depends on stages: Stage-5, Stage-4
+ Stage-0 depends on stages: Stage-5, Stage-4, Stage-7
Stage-3 depends on stages: Stage-0
Stage-4
- Stage-10 depends on stages: Stage-2 , consists of Stage-9, Stage-8
- Stage-9
- Stage-1 depends on stages: Stage-9, Stage-8
- Stage-7 depends on stages: Stage-1
- Stage-8
+ Stage-6
+ Stage-7 depends on stages: Stage-6
+ Stage-14 depends on stages: Stage-2 , consists of Stage-11, Stage-10, Stage-12
+ Stage-11
+ Stage-1 depends on stages: Stage-11, Stage-10, Stage-13
+ Stage-9 depends on stages: Stage-1
+ Stage-10
+ Stage-12
+ Stage-13 depends on stages: Stage-12
STAGE PLANS:
Stage: Stage-2
@@ -2210,7 +2242,7 @@ STAGE PLANS:
Union
Filter Operator
predicate:
- expr: (_col0 < 10)
+ expr: (_col0 < 10.0)
type: boolean
Select Operator
expressions:
@@ -2229,7 +2261,7 @@ STAGE PLANS:
name: default.src_multi1
Filter Operator
predicate:
- expr: ((_col0 > 10) and (_col0 < 20))
+ expr: ((_col0 > 10.0) and (_col0 < 20.0))
type: boolean
Select Operator
expressions:
@@ -2259,7 +2291,7 @@ STAGE PLANS:
Union
Filter Operator
predicate:
- expr: (_col0 < 10)
+ expr: (_col0 < 10.0)
type: boolean
Select Operator
expressions:
@@ -2278,7 +2310,7 @@ STAGE PLANS:
name: default.src_multi1
Filter Operator
predicate:
- expr: ((_col0 > 10) and (_col0 < 20))
+ expr: ((_col0 > 10.0) and (_col0 < 20.0))
type: boolean
Select Operator
expressions:
@@ -2296,14 +2328,14 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi2
- Stage: Stage-6
+ Stage: Stage-8
Conditional Operator
Stage: Stage-5
Move Operator
files:
hdfs directory: true
- destination: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-03-16_188_2182883420479850051/-ext-10000
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-57-55_729_3800954041756388684/-ext-10000
Stage: Stage-0
Move Operator
@@ -2321,7 +2353,7 @@ STAGE PLANS:
Stage: Stage-4
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-03-16_188_2182883420479850051/-ext-10004
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-57-55_729_3800954041756388684/-ext-10004
File Output Operator
compressed: false
GlobalTableId: 0
@@ -2331,14 +2363,33 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi1
- Stage: Stage-10
+ Stage: Stage-6
+ Map Reduce
+ Alias -> Map Operator Tree:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-57-55_729_3800954041756388684/-ext-10004
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi1
+
+ Stage: Stage-7
+ Move Operator
+ files:
+ hdfs directory: true
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-57-55_729_3800954041756388684/-ext-10000
+
+ Stage: Stage-14
Conditional Operator
- Stage: Stage-9
+ Stage: Stage-11
Move Operator
files:
hdfs directory: true
- destination: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-03-16_188_2182883420479850051/-ext-10002
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-57-55_729_3800954041756388684/-ext-10002
Stage: Stage-1
Move Operator
@@ -2350,13 +2401,26 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi2
- Stage: Stage-7
+ Stage: Stage-9
Stats-Aggr Operator
- Stage: Stage-8
+ Stage: Stage-10
+ Map Reduce
+ Alias -> Map Operator Tree:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-57-55_729_3800954041756388684/-ext-10005
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi2
+
+ Stage: Stage-12
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-03-16_188_2182883420479850051/-ext-10005
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-57-55_729_3800954041756388684/-ext-10005
File Output Operator
compressed: false
GlobalTableId: 0
@@ -2366,14 +2430,20 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.src_multi2
+ Stage: Stage-13
+ Move Operator
+ files:
+ hdfs directory: true
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-57-55_729_3800954041756388684/-ext-10002
+
from (select * from src union all select * from src) s
insert overwrite table src_multi1 select * where key < 10
insert overwrite table src_multi2 select * where key > 10 and key < 20
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi1
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_multi2
+Deleted /user/hive/warehouse/src_multi2
+Deleted /user/hive/warehouse/src_multi1
select * from src_multi1 order by key, value
@@ -2444,7 +2514,7 @@ STAGE PLANS:
alias: src
Filter Operator
predicate:
- expr: (key = 0)
+ expr: (key = 0.0)
type: boolean
Select Operator
expressions:
@@ -2461,7 +2531,7 @@ STAGE PLANS:
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Filter Operator
predicate:
- expr: (key = 2)
+ expr: (key = 2.0)
type: boolean
Select Operator
expressions:
@@ -2478,7 +2548,7 @@ STAGE PLANS:
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Filter Operator
predicate:
- expr: (key = 4)
+ expr: (key = 4.0)
type: boolean
Select Operator
expressions:
@@ -2549,7 +2619,7 @@ STAGE PLANS:
alias: src
Filter Operator
predicate:
- expr: (key = 0)
+ expr: (key = 0.0)
type: boolean
Select Operator
expressions:
@@ -2566,7 +2636,7 @@ STAGE PLANS:
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Filter Operator
predicate:
- expr: (key = 2)
+ expr: (key = 2.0)
type: boolean
Select Operator
expressions:
@@ -2583,7 +2653,7 @@ STAGE PLANS:
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Filter Operator
predicate:
- expr: (key = 4)
+ expr: (key = 4.0)
type: boolean
Select Operator
expressions:
@@ -2655,7 +2725,7 @@ STAGE PLANS:
alias: src
Filter Operator
predicate:
- expr: (key = 0)
+ expr: (key = 0.0)
type: boolean
Select Operator
expressions:
@@ -2672,7 +2742,7 @@ STAGE PLANS:
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Filter Operator
predicate:
- expr: (key = 2)
+ expr: (key = 2.0)
type: boolean
Select Operator
expressions:
@@ -2689,7 +2759,7 @@ STAGE PLANS:
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Filter Operator
predicate:
- expr: (key = 4)
+ expr: (key = 4.0)
type: boolean
Select Operator
expressions:
@@ -2760,7 +2830,7 @@ STAGE PLANS:
alias: src
Filter Operator
predicate:
- expr: (key = 0)
+ expr: (key = 0.0)
type: boolean
Select Operator
expressions:
@@ -2777,7 +2847,7 @@ STAGE PLANS:
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Filter Operator
predicate:
- expr: (key = 2)
+ expr: (key = 2.0)
type: boolean
Select Operator
expressions:
@@ -2794,7 +2864,7 @@ STAGE PLANS:
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Filter Operator
predicate:
- expr: (key = 4)
+ expr: (key = 4.0)
type: boolean
Select Operator
expressions:
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/rcfile_columnar/filter
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/rcfile_columnar/filter b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/rcfile_columnar/filter
index 66ff8d0..70f439c 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/rcfile_columnar/filter
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/rcfile_columnar/filter
@@ -1 +1,2 @@
-sed -e 's#hdfs://[^/]*/#hdfs://HADOOP/#'
+sed -e 's#hdfs://[^/]*/#hdfs://HADOOP/#' \
+ -e '/.*jobconf.xml:an attempt to override final parameter: mapreduce.job.end-notification.*; Ignoring\./ d'
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/rcfile_columnar/out
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/rcfile_columnar/out b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/rcfile_columnar/out
index af70adf..5b07a15 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/rcfile_columnar/out
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/rcfile_columnar/out
@@ -1,17 +1,3 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
CREATE table columnTable (key STRING, value STRING)
ROW FORMAT SERDE
@@ -23,11 +9,11 @@ STORED AS
FROM src
INSERT OVERWRITE TABLE columnTable SELECT src.key, src.value LIMIT 10
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/columntable
+Deleted /user/hive/warehouse/columntable
describe columnTable
-key string from deserializer
-value string from deserializer
+key string
+value string
SELECT columnTable.* FROM columnTable ORDER BY columnTable.key
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/stats8/filter
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/stats8/filter b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/stats8/filter
deleted file mode 100644
index 6f48e87..0000000
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/stats8/filter
+++ /dev/null
@@ -1,6 +0,0 @@
-sed -re 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' |
-sed -e 's#hdfs://[^/]*/#hdfs://HADOOP/#' \
- -e 's#owner:[^,]*,#owner:BORG,#' \
- -e 's#createTime:[0-9]*,#createTime:JUSTNOW#' \
- -e 's#location:hdfs://[^/]*/#location:hdfs://HADOOP/#' \
- -e 's#transient_lastDdlTime=[0-9]*#transient_lastDdlTime=JUSTNOW#'
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/stats8/in
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/stats8/in b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/stats8/in
deleted file mode 100644
index bafa87e..0000000
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/stats8/in
+++ /dev/null
@@ -1,47 +0,0 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
-set datanucleus.cache.collections=false;
-set hive.stats.autogather=false;
-set hive.exec.dynamic.partition=true;
-set hive.exec.dynamic.partition.mode=nonstrict;
-
-create table analyze_srcpart like srcpart;
-insert overwrite table analyze_srcpart partition (ds, hr) select * from srcpart where ds is not null;
-
-explain analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics;
-analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=11) compute statistics;
-describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11);
-describe extended analyze_srcpart;
-
-explain analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=12) compute statistics;
-analyze table analyze_srcpart PARTITION(ds='2008-04-08',hr=12) compute statistics;
-describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12);
-
-explain analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=11) compute statistics;
-analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=11) compute statistics;
-describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11);
-
-explain analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=12) compute statistics;
-analyze table analyze_srcpart PARTITION(ds='2008-04-09',hr=12) compute statistics;
-describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12);
-
-explain analyze table analyze_srcpart PARTITION(ds, hr) compute statistics;
-analyze table analyze_srcpart PARTITION(ds, hr) compute statistics;
-
-describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=11);
-describe extended analyze_srcpart PARTITION(ds='2008-04-08',hr=12);
-describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=11);
-describe extended analyze_srcpart PARTITION(ds='2008-04-09',hr=12);
-describe extended analyze_srcpart;
[4/5] BIGTOP-885. TestHiveSmokeBulk fails on Hive 0.9
Posted by co...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/bucketmapjoin5/out
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/bucketmapjoin5/out b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/bucketmapjoin5/out
index 3cf11fc..c090210 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/bucketmapjoin5/out
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/bucketmapjoin5/out
@@ -1,66 +1,52 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
CREATE TABLE srcbucket_mapjoin(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
load data local inpath 'seed_data_files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster/examples/hive/target/seed_data_files/srcbucket20.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/srcbucket20.txt
load data local inpath 'seed_data_files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster/examples/hive/target/seed_data_files/srcbucket21.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/srcbucket21.txt
CREATE TABLE srcbucket_mapjoin_part (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE
load data local inpath 'seed_data_files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster/examples/hive/target/seed_data_files/srcbucket20.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/srcbucket20.txt
load data local inpath 'seed_data_files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster/examples/hive/target/seed_data_files/srcbucket21.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/srcbucket21.txt
load data local inpath 'seed_data_files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster/examples/hive/target/seed_data_files/srcbucket22.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/srcbucket22.txt
load data local inpath 'seed_data_files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-08')
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster/examples/hive/target/seed_data_files/srcbucket23.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/srcbucket23.txt
load data local inpath 'seed_data_files/srcbucket20.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09')
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster/examples/hive/target/seed_data_files/srcbucket20.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/srcbucket20.txt
load data local inpath 'seed_data_files/srcbucket21.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09')
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster/examples/hive/target/seed_data_files/srcbucket21.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/srcbucket21.txt
load data local inpath 'seed_data_files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09')
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster/examples/hive/target/seed_data_files/srcbucket22.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/srcbucket22.txt
load data local inpath 'seed_data_files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part partition(ds='2008-04-09')
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster/examples/hive/target/seed_data_files/srcbucket23.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/srcbucket23.txt
CREATE TABLE srcbucket_mapjoin_part_2 (key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
load data local inpath 'seed_data_files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster/examples/hive/target/seed_data_files/srcbucket22.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/srcbucket22.txt
load data local inpath 'seed_data_files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-08')
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster/examples/hive/target/seed_data_files/srcbucket23.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/srcbucket23.txt
load data local inpath 'seed_data_files/srcbucket22.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster/examples/hive/target/seed_data_files/srcbucket22.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/srcbucket22.txt
load data local inpath 'seed_data_files/srcbucket23.txt' INTO TABLE srcbucket_mapjoin_part_2 partition(ds='2008-04-09')
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster/examples/hive/target/seed_data_files/srcbucket23.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/srcbucket23.txt
create table bucketmapjoin_hash_result_1 (key bigint , value1 bigint, value2 bigint)
@@ -80,16 +66,18 @@ ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME srcbucket_mapjoin) a) (TOK_TABREF (TOK_TABNAME srcbucket_mapjoin_part) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME bucketmapjoin_tmp_result))) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a))) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value)))))
STAGE DEPENDENCIES:
- Stage-7 is a root stage
- Stage-1 depends on stages: Stage-7
- Stage-5 depends on stages: Stage-1 , consists of Stage-4, Stage-3
+ Stage-9 is a root stage
+ Stage-1 depends on stages: Stage-9
+ Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
Stage-4
- Stage-0 depends on stages: Stage-4, Stage-3
+ Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
Stage-2 depends on stages: Stage-0
Stage-3
+ Stage-5
+ Stage-6 depends on stages: Stage-5
STAGE PLANS:
- Stage: Stage-7
+ Stage: Stage-9
Map Reduce Local Work
Alias -> Map Local Tables:
a
@@ -111,18 +99,18 @@ STAGE PLANS:
Position of Big Table: 1
Bucket Mapjoin Context:
Alias Bucket Base File Name Mapping:
- a {srcbucket20.txt=[srcbucket20.txt], srcbucket21.txt=[srcbucket21.txt], srcbucket22.txt=[srcbucket20.txt], srcbucket23.txt=[srcbucket21.txt], ds=2008-04-09/srcbucket20.txt=[srcbucket20.txt], ds=2008-04-09/srcbucket21.txt=[srcbucket21.txt], ds=2008-04-09/srcbucket22.txt=[srcbucket20.txt], ds=2008-04-09/srcbucket23.txt=[srcbucket21.txt]}
+ a {ds=2008-04-08/srcbucket20.txt=[srcbucket20.txt], ds=2008-04-08/srcbucket21.txt=[srcbucket21.txt], ds=2008-04-08/srcbucket22.txt=[srcbucket20.txt], ds=2008-04-08/srcbucket23.txt=[srcbucket21.txt], ds=2008-04-09/srcbucket20.txt=[srcbucket20.txt], ds=2008-04-09/srcbucket21.txt=[srcbucket21.txt], ds=2008-04-09/srcbucket22.txt=[srcbucket20.txt], ds=2008-04-09/srcbucket23.txt=[srcbucket21.txt]}
Alias Bucket File Name Mapping:
- a {hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket20.txt], hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket21.txt], hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket20.txt], hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket21.txt], hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket20.txt=[hdfs://monster01.sf.cloudera.com:1702
0/user/hive/warehouse/srcbucket_mapjoin/srcbucket20.txt], hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket21.txt=[hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket21.txt], hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket22.txt=[hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket20.txt], hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket23.txt=[hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket21.txt]}
+ a {hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt=[hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket20.txt], hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt=[hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket21.txt], hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt=[hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket20.txt], hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt=[hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket21.txt], hdfs://mgrover-bigtop-centos-1
.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket20.txt=[hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket20.txt], hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket21.txt=[hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket21.txt], hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket22.txt=[hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket20.txt], hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket23.txt=[hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket21.txt]}
Alias Bucket Output File Name Mapping:
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket20.txt 0
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket21.txt 1
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket22.txt 2
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket23.txt 3
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket20.txt 0
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket21.txt 1
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket22.txt 2
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08/srcbucket23.txt 3
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket20.txt 0
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket21.txt 1
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket22.txt 2
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09/srcbucket23.txt 3
Stage: Stage-1
Map Reduce
@@ -164,9 +152,9 @@ STAGE PLANS:
File Output Operator
compressed: false
GlobalTableId: 1
- directory: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-55-59_802_8749339940103211895/-ext-10002
+ directory: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10002
NumFilesPerFileSink: 1
- Stats Publishing Key Prefix: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-55-59_802_8749339940103211895/-ext-10000/
+ Stats Publishing Key Prefix: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10000/
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -176,12 +164,12 @@ STAGE PLANS:
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
name default.bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1301691359
+ transient_lastDdlTime 1366743639
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucketmapjoin_tmp_result
TotalFiles: 1
@@ -191,10 +179,10 @@ STAGE PLANS:
Map Reduce Local Work
Needs Tagging: false
Path -> Alias:
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b]
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09 [b]
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08 [b]
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09 [b]
Path -> Partition:
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08
Partition
base file name: ds=2008-04-08
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -208,13 +196,18 @@ STAGE PLANS:
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-08
name default.srcbucket_mapjoin_part
+ numFiles 4
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1301691354
+ totalSize 5812
+ transient_lastDdlTime 1366743633
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -226,17 +219,22 @@ STAGE PLANS:
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part
name default.srcbucket_mapjoin_part
+ numFiles 8
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1301691354
+ totalSize 11624
+ transient_lastDdlTime 1366743636
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part
name: default.srcbucket_mapjoin_part
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09
Partition
base file name: ds=2008-04-09
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -250,13 +248,18 @@ STAGE PLANS:
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part/ds=2008-04-09
name default.srcbucket_mapjoin_part
+ numFiles 4
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1301691354
+ totalSize 5812
+ transient_lastDdlTime 1366743636
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -268,32 +271,40 @@ STAGE PLANS:
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part
name default.srcbucket_mapjoin_part
+ numFiles 8
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1301691354
+ totalSize 11624
+ transient_lastDdlTime 1366743636
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part
name: default.srcbucket_mapjoin_part
+ Truncated Path -> Alias:
+ /srcbucket_mapjoin_part/ds=2008-04-08 [b]
+ /srcbucket_mapjoin_part/ds=2008-04-09 [b]
- Stage: Stage-5
+ Stage: Stage-7
Conditional Operator
Stage: Stage-4
Move Operator
files:
hdfs directory: true
- source: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-55-59_802_8749339940103211895/-ext-10002
- destination: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-55-59_802_8749339940103211895/-ext-10000
+ source: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10002
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10000
Stage: Stage-0
Move Operator
tables:
replace: true
- source: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-55-59_802_8749339940103211895/-ext-10000
+ source: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -303,28 +314,28 @@ STAGE PLANS:
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
name default.bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1301691359
+ transient_lastDdlTime 1366743639
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucketmapjoin_tmp_result
- tmp directory: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-55-59_802_8749339940103211895/-ext-10001
+ tmp directory: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10001
Stage: Stage-2
Stats-Aggr Operator
- Stats Aggregation Key Prefix: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-55-59_802_8749339940103211895/-ext-10000/
+ Stats Aggregation Key Prefix: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10000/
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-55-59_802_8749339940103211895/-ext-10002
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10002
File Output Operator
compressed: false
GlobalTableId: 0
- directory: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-55-59_802_8749339940103211895/-ext-10000
+ directory: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -335,12 +346,12 @@ STAGE PLANS:
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
name default.bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1301691359
+ transient_lastDdlTime 1366743639
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucketmapjoin_tmp_result
TotalFiles: 1
@@ -348,9 +359,9 @@ STAGE PLANS:
MultiFileSpray: false
Needs Tagging: false
Path -> Alias:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-55-59_802_8749339940103211895/-ext-10002 [hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-55-59_802_8749339940103211895/-ext-10002]
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10002 [hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10002]
Path -> Partition:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-55-59_802_8749339940103211895/-ext-10002
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10002
Partition
base file name: -ext-10002
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -361,12 +372,12 @@ STAGE PLANS:
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
name default.bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1301691359
+ transient_lastDdlTime 1366743639
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -377,15 +388,96 @@ STAGE PLANS:
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
name default.bucketmapjoin_tmp_result
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1301691359
+ transient_lastDdlTime 1366743639
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucketmapjoin_tmp_result
name: default.bucketmapjoin_tmp_result
+ Truncated Path -> Alias:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10002 [hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10002]
+
+ Stage: Stage-5
+ Map Reduce
+ Alias -> Map Operator Tree:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10002
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ directory: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10000
+ NumFilesPerFileSink: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns key,value1,value2
+ columns.types string:string:string
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
+ name default.bucketmapjoin_tmp_result
+ serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ transient_lastDdlTime 1366743639
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.bucketmapjoin_tmp_result
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+ Needs Tagging: false
+ Path -> Alias:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10002 [hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10002]
+ Path -> Partition:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10002
+ Partition
+ base file name: -ext-10002
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns key,value1,value2
+ columns.types string:string:string
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
+ name default.bucketmapjoin_tmp_result
+ serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ transient_lastDdlTime 1366743639
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns key,value1,value2
+ columns.types string:string:string
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
+ name default.bucketmapjoin_tmp_result
+ serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ transient_lastDdlTime 1366743639
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.bucketmapjoin_tmp_result
+ name: default.bucketmapjoin_tmp_result
+ Truncated Path -> Alias:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10002 [hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10002]
+
+ Stage: Stage-6
+ Move Operator
+ files:
+ hdfs directory: true
+ source: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10002
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-00-39_907_3836130325622754442/-ext-10000
@@ -394,7 +486,7 @@ insert overwrite table bucketmapjoin_tmp_result
select /*+mapjoin(a)*/ a.key, a.value, b.value
from srcbucket_mapjoin a join srcbucket_mapjoin_part b
on a.key=b.key
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
+Deleted /user/hive/warehouse/bucketmapjoin_tmp_result
select count(1) from bucketmapjoin_tmp_result
@@ -402,14 +494,14 @@ select count(1) from bucketmapjoin_tmp_result
insert overwrite table bucketmapjoin_hash_result_1
select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_hash_result_1
+Deleted /user/hive/warehouse/bucketmapjoin_hash_result_1
set hive.optimize.bucketmapjoin = false
insert overwrite table bucketmapjoin_tmp_result
select /*+mapjoin(a)*/ a.key, a.value, b.value
from srcbucket_mapjoin a join srcbucket_mapjoin_part b
on a.key=b.key
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
+Deleted /user/hive/warehouse/bucketmapjoin_tmp_result
select count(1) from bucketmapjoin_tmp_result
@@ -417,7 +509,7 @@ select count(1) from bucketmapjoin_tmp_result
insert overwrite table bucketmapjoin_hash_result_2
select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_hash_result_2
+Deleted /user/hive/warehouse/bucketmapjoin_hash_result_2
select a.key-b.key, a.value1-b.value1, a.value2-b.value2
@@ -435,16 +527,18 @@ ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME srcbucket_mapjoin) a) (TOK_TABREF (TOK_TABNAME srcbucket_mapjoin_part_2) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME bucketmapjoin_tmp_result))) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a))) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value)))))
STAGE DEPENDENCIES:
- Stage-7 is a root stage
- Stage-1 depends on stages: Stage-7
- Stage-5 depends on stages: Stage-1 , consists of Stage-4, Stage-3
+ Stage-9 is a root stage
+ Stage-1 depends on stages: Stage-9
+ Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
Stage-4
- Stage-0 depends on stages: Stage-4, Stage-3
+ Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
Stage-2 depends on stages: Stage-0
Stage-3
+ Stage-5
+ Stage-6 depends on stages: Stage-5
STAGE PLANS:
- Stage: Stage-7
+ Stage: Stage-9
Map Reduce Local Work
Alias -> Map Local Tables:
a
@@ -466,14 +560,14 @@ STAGE PLANS:
Position of Big Table: 1
Bucket Mapjoin Context:
Alias Bucket Base File Name Mapping:
- a {srcbucket22.txt=[srcbucket20.txt], srcbucket23.txt=[srcbucket21.txt], ds=2008-04-09/srcbucket22.txt=[srcbucket20.txt], ds=2008-04-09/srcbucket23.txt=[srcbucket21.txt]}
+ a {ds=2008-04-08/srcbucket22.txt=[srcbucket20.txt], ds=2008-04-08/srcbucket23.txt=[srcbucket21.txt], ds=2008-04-09/srcbucket22.txt=[srcbucket20.txt], ds=2008-04-09/srcbucket23.txt=[srcbucket21.txt]}
Alias Bucket File Name Mapping:
- a {hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket20.txt], hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket21.txt], hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket22.txt=[hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket20.txt], hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket23.txt=[hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket21.txt]}
+ a {hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt=[hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket20.txt], hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt=[hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket21.txt], hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket22.txt=[hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket20.txt], hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket23.txt=[hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin/srcbucket21.txt]}
Alias Bucket Output File Name Mapping:
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt 0
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt 1
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket22.txt 0
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket23.txt 1
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket22.txt 0
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08/srcbucket23.txt 1
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket22.txt 0
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09/srcbucket23.txt 1
Stage: Stage-1
Map Reduce
@@ -515,9 +609,9 @@ STAGE PLANS:
File Output Operator
compressed: false
GlobalTableId: 1
- directory: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-57-41_645_9053634577540746990/-ext-10002
+ directory: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10002
NumFilesPerFileSink: 1
- Stats Publishing Key Prefix: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-57-41_645_9053634577540746990/-ext-10000/
+ Stats Publishing Key Prefix: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10000/
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -527,16 +621,17 @@ STAGE PLANS:
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
name default.bucketmapjoin_tmp_result
numFiles 1
numPartitions 0
numRows 0
+ rawDataSize 0
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 17966
- transient_lastDdlTime 1301691422
+ transient_lastDdlTime 1366743737
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucketmapjoin_tmp_result
TotalFiles: 1
@@ -546,10 +641,10 @@ STAGE PLANS:
Map Reduce Local Work
Needs Tagging: false
Path -> Alias:
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [b]
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09 [b]
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08 [b]
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09 [b]
Path -> Partition:
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08
Partition
base file name: ds=2008-04-08
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -563,13 +658,18 @@ STAGE PLANS:
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-08
name default.srcbucket_mapjoin_part_2
+ numFiles 2
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1301691357
+ totalSize 3062
+ transient_lastDdlTime 1366743638
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -581,17 +681,22 @@ STAGE PLANS:
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2
name default.srcbucket_mapjoin_part_2
+ numFiles 4
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1301691357
+ totalSize 6124
+ transient_lastDdlTime 1366743639
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part_2
name: default.srcbucket_mapjoin_part_2
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09
Partition
base file name: ds=2008-04-09
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -605,13 +710,18 @@ STAGE PLANS:
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2/ds=2008-04-09
name default.srcbucket_mapjoin_part_2
+ numFiles 2
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1301691357
+ totalSize 3062
+ transient_lastDdlTime 1366743639
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -623,32 +733,40 @@ STAGE PLANS:
columns.types int:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcbucket_mapjoin_part_2
name default.srcbucket_mapjoin_part_2
+ numFiles 4
+ numPartitions 2
+ numRows 0
partition_columns ds
+ rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1301691357
+ totalSize 6124
+ transient_lastDdlTime 1366743639
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcbucket_mapjoin_part_2
name: default.srcbucket_mapjoin_part_2
+ Truncated Path -> Alias:
+ /srcbucket_mapjoin_part_2/ds=2008-04-08 [b]
+ /srcbucket_mapjoin_part_2/ds=2008-04-09 [b]
- Stage: Stage-5
+ Stage: Stage-7
Conditional Operator
Stage: Stage-4
Move Operator
files:
hdfs directory: true
- source: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-57-41_645_9053634577540746990/-ext-10002
- destination: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-57-41_645_9053634577540746990/-ext-10000
+ source: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10002
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10000
Stage: Stage-0
Move Operator
tables:
replace: true
- source: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-57-41_645_9053634577540746990/-ext-10000
+ source: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -658,32 +776,33 @@ STAGE PLANS:
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
name default.bucketmapjoin_tmp_result
numFiles 1
numPartitions 0
numRows 0
+ rawDataSize 0
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 17966
- transient_lastDdlTime 1301691422
+ transient_lastDdlTime 1366743737
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucketmapjoin_tmp_result
- tmp directory: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-57-41_645_9053634577540746990/-ext-10001
+ tmp directory: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10001
Stage: Stage-2
Stats-Aggr Operator
- Stats Aggregation Key Prefix: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-57-41_645_9053634577540746990/-ext-10000/
+ Stats Aggregation Key Prefix: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10000/
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-57-41_645_9053634577540746990/-ext-10002
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10002
File Output Operator
compressed: false
GlobalTableId: 0
- directory: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-57-41_645_9053634577540746990/-ext-10000
+ directory: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10000
NumFilesPerFileSink: 1
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -694,16 +813,17 @@ STAGE PLANS:
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
name default.bucketmapjoin_tmp_result
numFiles 1
numPartitions 0
numRows 0
+ rawDataSize 0
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 17966
- transient_lastDdlTime 1301691422
+ transient_lastDdlTime 1366743737
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucketmapjoin_tmp_result
TotalFiles: 1
@@ -711,9 +831,9 @@ STAGE PLANS:
MultiFileSpray: false
Needs Tagging: false
Path -> Alias:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-57-41_645_9053634577540746990/-ext-10002 [hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-57-41_645_9053634577540746990/-ext-10002]
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10002 [hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10002]
Path -> Partition:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_13-57-41_645_9053634577540746990/-ext-10002
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10002
Partition
base file name: -ext-10002
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -724,16 +844,17 @@ STAGE PLANS:
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
name default.bucketmapjoin_tmp_result
numFiles 1
numPartitions 0
numRows 0
+ rawDataSize 0
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 17966
- transient_lastDdlTime 1301691422
+ transient_lastDdlTime 1366743737
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -744,19 +865,116 @@ STAGE PLANS:
columns.types string:string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
name default.bucketmapjoin_tmp_result
numFiles 1
numPartitions 0
numRows 0
+ rawDataSize 0
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
totalSize 17966
- transient_lastDdlTime 1301691422
+ transient_lastDdlTime 1366743737
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.bucketmapjoin_tmp_result
name: default.bucketmapjoin_tmp_result
+ Truncated Path -> Alias:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10002 [hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10002]
+
+ Stage: Stage-5
+ Map Reduce
+ Alias -> Map Operator Tree:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10002
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ directory: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10000
+ NumFilesPerFileSink: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns key,value1,value2
+ columns.types string:string:string
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
+ name default.bucketmapjoin_tmp_result
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
+ serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 17966
+ transient_lastDdlTime 1366743737
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.bucketmapjoin_tmp_result
+ TotalFiles: 1
+ GatherStats: false
+ MultiFileSpray: false
+ Needs Tagging: false
+ Path -> Alias:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10002 [hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10002]
+ Path -> Partition:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10002
+ Partition
+ base file name: -ext-10002
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns key,value1,value2
+ columns.types string:string:string
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
+ name default.bucketmapjoin_tmp_result
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
+ serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 17966
+ transient_lastDdlTime 1366743737
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ properties:
+ bucket_count -1
+ columns key,value1,value2
+ columns.types string:string:string
+ file.inputformat org.apache.hadoop.mapred.TextInputFormat
+ file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_tmp_result
+ name default.bucketmapjoin_tmp_result
+ numFiles 1
+ numPartitions 0
+ numRows 0
+ rawDataSize 0
+ serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
+ serialization.format 1
+ serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 17966
+ transient_lastDdlTime 1366743737
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.bucketmapjoin_tmp_result
+ name: default.bucketmapjoin_tmp_result
+ Truncated Path -> Alias:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10002 [hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10002]
+
+ Stage: Stage-6
+ Move Operator
+ files:
+ hdfs directory: true
+ source: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10002
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_12-03-31_286_2769775196968737402/-ext-10000
@@ -765,31 +983,33 @@ insert overwrite table bucketmapjoin_tmp_result
select /*+mapjoin(a)*/ a.key, a.value, b.value
from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b
on a.key=b.key
+Deleted /user/hive/warehouse/bucketmapjoin_tmp_result
select count(1) from bucketmapjoin_tmp_result
-928
+0
insert overwrite table bucketmapjoin_hash_result_1
select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_hash_result_1
+Deleted /user/hive/warehouse/bucketmapjoin_hash_result_1
set hive.optimize.bucketmapjoin = false
insert overwrite table bucketmapjoin_tmp_result
select /*+mapjoin(a)*/ a.key, a.value, b.value
from srcbucket_mapjoin a join srcbucket_mapjoin_part_2 b
on a.key=b.key
+Deleted /user/hive/warehouse/bucketmapjoin_tmp_result
select count(1) from bucketmapjoin_tmp_result
-928
+0
insert overwrite table bucketmapjoin_hash_result_2
select sum(hash(key)), sum(hash(value1)), sum(hash(value2)) from bucketmapjoin_tmp_result
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/bucketmapjoin_hash_result_2
+Deleted /user/hive/warehouse/bucketmapjoin_hash_result_2
select a.key-b.key, a.value1-b.value1, a.value2-b.value2
from bucketmapjoin_hash_result_1 a left outer join bucketmapjoin_hash_result_2 b
on a.key = b.key
-0 0 0
+NULL NULL NULL
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/drop_multi_partitions/out
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/drop_multi_partitions/out b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/drop_multi_partitions/out
index db1af12..edc8e7c 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/drop_multi_partitions/out
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/drop_multi_partitions/out
@@ -1,17 +1,3 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
create table mp (a string) partitioned by (b string, c string)
@@ -30,7 +16,7 @@ b=2/c=2
explain extended alter table mp drop partition (b='1')
ABSTRACT SYNTAX TREE:
- (TOK_ALTERTABLE_DROPPARTS mp (TOK_PARTSPEC (TOK_PARTVAL b '1')))
+ (TOK_ALTERTABLE_DROPPARTS mp (TOK_PARTSPEC (TOK_PARTVAL b = '1')))
STAGE DEPENDENCIES:
Stage-0 is a root stage
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/groupby_map_ppr_multi_distinct/filter
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/groupby_map_ppr_multi_distinct/filter b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/groupby_map_ppr_multi_distinct/filter
index d48cc46..ff8759e 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/groupby_map_ppr_multi_distinct/filter
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/groupby_map_ppr_multi_distinct/filter
@@ -1,3 +1,5 @@
sed -re 's#hdfs://[^/]*/#hdfs://HADOOP/#' \
-e 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' \
- -e 's#transient_lastDdlTime [0-9]*#transient_lastDdlTime JUSTNOW#'
+ -e 's#transient_lastDdlTime [0-9]*#transient_lastDdlTime JUSTNOW#' \
+ -e 's#file:/.*/-(ext|mr)-1000#file:/HADOOP/-\1-1000#' \
+ -e '/.*jobconf.xml:an attempt to override final parameter: mapreduce.job.end-notification.*; Ignoring\./ d'
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/groupby_map_ppr_multi_distinct/out
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/groupby_map_ppr_multi_distinct/out b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/groupby_map_ppr_multi_distinct/out
index 00e923c..a4dac68 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/groupby_map_ppr_multi_distinct/out
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/groupby_map_ppr_multi_distinct/out
@@ -1,17 +1,3 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
set hive.map.aggr=true
set hive.groupby.skewindata=false
set mapred.reduce.tasks=31
@@ -42,62 +28,57 @@ STAGE PLANS:
TableScan
alias: src
GatherStats: false
- Filter Operator
- isSamplingPred: false
- predicate:
- expr: (ds = '2008-04-08')
- type: boolean
- Select Operator
- expressions:
- expr: key
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ outputColumnNames: key, value
+ Group By Operator
+ aggregations:
+ expr: count(DISTINCT substr(value, 5))
+ expr: sum(substr(value, 5))
+ expr: sum(DISTINCT substr(value, 5))
+ expr: count(DISTINCT value)
+ bucketGroup: false
+ keys:
+ expr: substr(key, 1, 1)
+ type: string
+ expr: substr(value, 5)
type: string
expr: value
type: string
- outputColumnNames: key, value
- Group By Operator
- aggregations:
- expr: count(DISTINCT substr(value, 5))
- expr: sum(substr(value, 5))
- expr: sum(DISTINCT substr(value, 5))
- expr: count(DISTINCT value)
- bucketGroup: false
- keys:
- expr: substr(key, 1, 1)
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ expr: _col1
type: string
- expr: substr(value, 5)
+ expr: _col2
type: string
- expr: value
+ sort order: +++
+ Map-reduce partition columns:
+ expr: _col0
type: string
- mode: hash
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- expr: _col2
- type: string
- sort order: +++
- Map-reduce partition columns:
- expr: _col0
- type: string
- tag: -1
- value expressions:
- expr: _col3
- type: bigint
- expr: _col4
- type: double
- expr: _col5
- type: double
- expr: _col6
- type: bigint
+ tag: -1
+ value expressions:
+ expr: _col3
+ type: bigint
+ expr: _col4
+ type: double
+ expr: _col5
+ type: double
+ expr: _col6
+ type: bigint
Needs Tagging: false
Path -> Alias:
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcpart/ds=2008-04-08/hr=11 [src]
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcpart/ds=2008-04-08/hr=12 [src]
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcpart/ds=2008-04-08/hr=11 [src]
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcpart/ds=2008-04-08/hr=12 [src]
Path -> Partition:
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcpart/ds=2008-04-08/hr=11
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcpart/ds=2008-04-08/hr=11
Partition
base file name: hr=11
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -111,13 +92,18 @@ STAGE PLANS:
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcpart/ds=2008-04-08/hr=11
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcpart/ds=2008-04-08/hr=11
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1301676822
+ totalSize 5812
+ transient_lastDdlTime 1366737715
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -128,17 +114,22 @@ STAGE PLANS:
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcpart
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcpart
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1301676822
+ totalSize 23248
+ transient_lastDdlTime 1366737718
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
- hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcpart/ds=2008-04-08/hr=12
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcpart/ds=2008-04-08/hr=12
Partition
base file name: hr=12
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -152,13 +143,18 @@ STAGE PLANS:
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcpart/ds=2008-04-08/hr=12
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcpart/ds=2008-04-08/hr=12
name default.srcpart
+ numFiles 1
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1301676822
+ totalSize 5812
+ transient_lastDdlTime 1366737716
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -169,13 +165,18 @@ STAGE PLANS:
columns.types string:string
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/srcpart
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/srcpart
name default.srcpart
+ numFiles 4
+ numPartitions 4
+ numRows 0
partition_columns ds/hr
+ rawDataSize 0
serialization.ddl struct srcpart { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1301676822
+ totalSize 23248
+ transient_lastDdlTime 1366737718
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.srcpart
name: default.srcpart
@@ -221,9 +222,9 @@ STAGE PLANS:
File Output Operator
compressed: false
GlobalTableId: 1
- directory: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-08-20_880_8656529853875888350/-ext-10000
+ directory: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-34-17_553_86036101665952261/-ext-10000
NumFilesPerFileSink: 1
- Stats Publishing Key Prefix: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-08-20_880_8656529853875888350/-ext-10000/
+ Stats Publishing Key Prefix: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-34-17_553_86036101665952261/-ext-10000/
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -233,23 +234,26 @@ STAGE PLANS:
columns.types string:int:string:int:int
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/dest1
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/dest1
name default.dest1
serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1301677700
+ transient_lastDdlTime 1366742057
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.dest1
TotalFiles: 1
GatherStats: true
MultiFileSpray: false
+ Truncated Path -> Alias:
+ /srcpart/ds=2008-04-08/hr=11 [src]
+ /srcpart/ds=2008-04-08/hr=12 [src]
Stage: Stage-0
Move Operator
tables:
replace: true
- source: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-08-20_880_8656529853875888350/-ext-10000
+ source: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-34-17_553_86036101665952261/-ext-10000
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -259,19 +263,19 @@ STAGE PLANS:
columns.types string:int:string:int:int
file.inputformat org.apache.hadoop.mapred.TextInputFormat
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- location hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/dest1
+ location hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/dest1
name default.dest1
serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- transient_lastDdlTime 1301677700
+ transient_lastDdlTime 1366742057
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.dest1
- tmp directory: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-08-20_880_8656529853875888350/-ext-10001
+ tmp directory: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-34-17_553_86036101665952261/-ext-10001
Stage: Stage-2
Stats-Aggr Operator
- Stats Aggregation Key Prefix: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-08-20_880_8656529853875888350/-ext-10000/
+ Stats Aggregation Key Prefix: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-34-17_553_86036101665952261/-ext-10000/
@@ -281,7 +285,7 @@ INSERT OVERWRITE TABLE dest1
SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(DISTINCT src.value)
WHERE src.ds = '2008-04-08'
GROUP BY substr(src.key,1,1)
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/dest1
+Deleted /user/hive/warehouse/dest1
SELECT dest1.* FROM dest1
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/index_creation/filter
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/index_creation/filter b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/index_creation/filter
index 51ff5fc..2aa66f0 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/index_creation/filter
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/index_creation/filter
@@ -3,4 +3,6 @@ sed -e 's#hdfs://[^/]*/#hdfs://HADOOP/#' \
-e 's#owner:[^,]*,#owner:BORG,#' \
-e 's#createTime:[0-9]*,#createTime:JUSTNOW#' \
-e 's#location:hdfs://[^/]*/#location:hdfs://HADOOP/#' \
- -e 's#transient_lastDdlTime=[0-9]*}#transient_lastDdlTime=JUSTNOW}#'
+ -e 's#transient_lastDdlTime=[0-9]*}#transient_lastDdlTime=JUSTNOW}#' \
+ -e '/.*jobconf.xml:an attempt to override final parameter: mapreduce.job.end-notification.*; Ignoring\./ d'
+
[3/5] BIGTOP-885. TestHiveSmokeBulk fails on Hive 0.9
Posted by co...@apache.org.
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/index_creation/out
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/index_creation/out b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/index_creation/out
index b6f8453..4d81060 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/index_creation/out
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/index_creation/out
@@ -1,17 +1,3 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
drop index src_index_2 on src
drop index src_index_3 on src
@@ -38,7 +24,7 @@ key string
_bucketname string
_offsets array<bigint>
-Detailed Table Information Table(tableName:default__src_src_index_2__, dbName:default, owner:null, createTime:1301677813, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:_bucketname, type:string, comment:), FieldSchema(name:_offsets, type:array<bigint>, comment:)], location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/default__src_src_index_2__, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[Order(col:key, order:1)], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1301677813}, viewOriginalText:null, viewExpandedText:null, tableType:INDEX_TABLE)
+Detailed Table Information Table(tableName:default__src_src_index_2__, dbName:default, owner:null, createTime:1366741979, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:_bucketname, type:string, comment:), FieldSchema(name:_offsets, type:array<bigint>, comment:)], location:hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/default__src_src_index_2__, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[Order(col:key, order:1)], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{}), storedAsSubDirectories:false), partitionKeys:[], parameters:{transient_lastDdlTime=1366741
979}, viewOriginalText:null, viewExpandedText:null, tableType:INDEX_TABLE)
create index src_index_3 on table src(key) as 'compact' WITH DEFERRED REBUILD in table src_idx_src_index_3
@@ -48,7 +34,7 @@ key string
_bucketname string
_offsets array<bigint>
-Detailed Table Information Table(tableName:src_idx_src_index_3, dbName:default, owner:null, createTime:1301677813, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:_bucketname, type:string, comment:), FieldSchema(name:_offsets, type:array<bigint>, comment:)], location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_idx_src_index_3, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[Order(col:key, order:1)], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1301677813}, viewOriginalText:null, viewExpandedText:null, tableType:INDEX_TABLE)
+Detailed Table Information Table(tableName:src_idx_src_index_3, dbName:default, owner:null, createTime:1366741980, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:_bucketname, type:string, comment:), FieldSchema(name:_offsets, type:array<bigint>, comment:)], location:hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/src_idx_src_index_3, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[Order(col:key, order:1)], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{}), storedAsSubDirectories:false), partitionKeys:[], parameters:{transient_lastDdlTime=1366741980}, viewOrig
inalText:null, viewExpandedText:null, tableType:INDEX_TABLE)
create index src_index_4 on table src(key) as 'compact' WITH DEFERRED REBUILD ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE
@@ -58,7 +44,7 @@ key string
_bucketname string
_offsets array<bigint>
-Detailed Table Information Table(tableName:default__src_src_index_4__, dbName:default, owner:null, createTime:1301677814, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:_bucketname, type:string, comment:), FieldSchema(name:_offsets, type:array<bigint>, comment:)], location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/default__src_src_index_4__, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format= , field.delim=
+Detailed Table Information Table(tableName:default__src_src_index_4__, dbName:default, owner:null, createTime:1366741981, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:_bucketname, type:string, comment:), FieldSchema(name:_offsets, type:array<bigint>, comment:)], location:hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/default__src_src_index_4__, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format= , field.delim=
create index src_index_5 on table src(key) as 'compact' WITH DEFERRED REBUILD ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' ESCAPED BY '\\'
@@ -68,27 +54,27 @@ key string
_bucketname string
_offsets array<bigint>
-Detailed Table Information Table(tableName:default__src_src_index_5__, dbName:default, owner:null, createTime:1301677814, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:_bucketname, type:string, comment:), FieldSchema(name:_offsets, type:array<bigint>, comment:)], location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/default__src_src_index_5__, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{escape.delim=\, serialization.format= , field.delim=
+Detailed Table Information Table(tableName:default__src_src_index_5__, dbName:default, owner:null, createTime:1366741982, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:_bucketname, type:string, comment:), FieldSchema(name:_offsets, type:array<bigint>, comment:)], location:hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/default__src_src_index_5__, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{escape.delim=\, serialization.format= , field.delim=
create index src_index_6 on table src(key) as 'compact' WITH DEFERRED REBUILD STORED AS RCFILE
desc extended default__src_src_index_6__
-key string from deserializer
-_bucketname string from deserializer
-_offsets array<bigint> from deserializer
+key string
+_bucketname string
+_offsets array<bigint>
-Detailed Table Information Table(tableName:default__src_src_index_6__, dbName:default, owner:null, createTime:1301677814, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:_bucketname, type:string, comment:), FieldSchema(name:_offsets, type:array<bigint>, comment:)], location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/default__src_src_index_6__, inputFormat:org.apache.hadoop.hive.ql.io.RCFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[Order(col:key, order:1)], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1301677814}, viewOriginalText:null, viewExpandedText:null, tableType:INDEX_TABLE)
+Detailed Table Information Table(tableName:default__src_src_index_6__, dbName:default, owner:null, createTime:1366741982, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:_bucketname, type:string, comment:), FieldSchema(name:_offsets, type:array<bigint>, comment:)], location:hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/default__src_src_index_6__, inputFormat:org.apache.hadoop.hive.ql.io.RCFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[Order(col:key, order:1)], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{}), storedAsSubDirectories:false), partitionKeys:[], parameters:{transient_lastDdlTime=136674
1982}, viewOriginalText:null, viewExpandedText:null, tableType:INDEX_TABLE)
create index src_index_7 on table src(key) as 'compact' WITH DEFERRED REBUILD in table src_idx_src_index_7 STORED AS RCFILE
desc extended src_idx_src_index_7
-key string from deserializer
-_bucketname string from deserializer
-_offsets array<bigint> from deserializer
+key string
+_bucketname string
+_offsets array<bigint>
-Detailed Table Information Table(tableName:src_idx_src_index_7, dbName:default, owner:null, createTime:1301677815, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:_bucketname, type:string, comment:), FieldSchema(name:_offsets, type:array<bigint>, comment:)], location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/src_idx_src_index_7, inputFormat:org.apache.hadoop.hive.ql.io.RCFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[Order(col:key, order:1)], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1301677815}, viewOriginalText:null, viewExpandedText:null, tableType:INDEX_TABLE)
+Detailed Table Information Table(tableName:src_idx_src_index_7, dbName:default, owner:null, createTime:1366741983, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:_bucketname, type:string, comment:), FieldSchema(name:_offsets, type:array<bigint>, comment:)], location:hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/src_idx_src_index_7, inputFormat:org.apache.hadoop.hive.ql.io.RCFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[Order(col:key, order:1)], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{}), storedAsSubDirectories:false), partitionKeys:[], parameters:{transient_lastDdlTime=1366741983}, viewOri
ginalText:null, viewExpandedText:null, tableType:INDEX_TABLE)
create index src_index_8 on table src(key) as 'compact' WITH DEFERRED REBUILD IDXPROPERTIES ("prop1"="val1", "prop2"="val2")
@@ -98,7 +84,7 @@ key string
_bucketname string
_offsets array<bigint>
-Detailed Table Information Table(tableName:default__src_src_index_8__, dbName:default, owner:null, createTime:1301677815, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:_bucketname, type:string, comment:), FieldSchema(name:_offsets, type:array<bigint>, comment:)], location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/default__src_src_index_8__, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[Order(col:key, order:1)], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1301677815}, viewOriginalText:null, viewExpandedText:null, tableType:INDEX_TABLE)
+Detailed Table Information Table(tableName:default__src_src_index_8__, dbName:default, owner:null, createTime:1366741983, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:_bucketname, type:string, comment:), FieldSchema(name:_offsets, type:array<bigint>, comment:)], location:hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/default__src_src_index_8__, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[Order(col:key, order:1)], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{}), storedAsSubDirectories:false), partitionKeys:[], parameters:{transient_lastDdlTime=1366741
983}, viewOriginalText:null, viewExpandedText:null, tableType:INDEX_TABLE)
create index src_index_9 on table src(key) as 'compact' WITH DEFERRED REBUILD TBLPROPERTIES ("prop1"="val1", "prop2"="val2")
@@ -108,7 +94,7 @@ key string
_bucketname string
_offsets array<bigint>
-Detailed Table Information Table(tableName:default__src_src_index_9__, dbName:default, owner:null, createTime:1301677815, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:_bucketname, type:string, comment:), FieldSchema(name:_offsets, type:array<bigint>, comment:)], location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/default__src_src_index_9__, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[Order(col:key, order:1)], parameters:{}), partitionKeys:[], parameters:{prop2=val2, prop1=val1, transient_lastDdlTime=1301677815}, viewOriginalText:null, viewExpandedText:null, tableType:INDEX_TABLE)
+Detailed Table Information Table(tableName:default__src_src_index_9__, dbName:default, owner:null, createTime:1366741984, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:_bucketname, type:string, comment:), FieldSchema(name:_offsets, type:array<bigint>, comment:)], location:hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/default__src_src_index_9__, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[Order(col:key, order:1)], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{}), storedAsSubDirectories:false), partitionKeys:[], parameters:{prop2=val2, prop1=val1, trans
ient_lastDdlTime=1366741984}, viewOriginalText:null, viewExpandedText:null, tableType:INDEX_TABLE)
create table `_t`(`_i` int, `_j` int)
@@ -116,14 +102,14 @@ create table `_t`(`_i` int, `_j` int)
create index x on table `_t`(`_j`) as 'compact' WITH DEFERRED REBUILD
alter index x on `_t` rebuild
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/default___t_x__
+Deleted /user/hive/warehouse/default___t_x__
create index x2 on table `_t`(`_i`,`_j`) as 'compact' WITH DEFERRED
REBUILD
alter index x2 on `_t` rebuild
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/default___t_x2__
+Deleted /user/hive/warehouse/default___t_x2__
drop index src_index_2 on src
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/join19/filter
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/join19/filter b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/join19/filter
new file mode 100644
index 0000000..574efbd
--- /dev/null
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/join19/filter
@@ -0,0 +1 @@
+sed -e '/.*jobconf.xml:an attempt to override final parameter: mapreduce.job.end-notification.*; Ignoring\./ d'
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/join19/out
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/join19/out b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/join19/out
index 23cf4cb..d7aa686 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/join19/out
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/join19/out
@@ -1,17 +1,3 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
CREATE TABLE triples (foo string, subject string, predicate string, object string, foo2 string)
@@ -90,27 +76,23 @@ STAGE PLANS:
predicate:
expr: ((predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__INSTANCEOF_REL') and (object = 'http://ontos/OntosMiner/Common.English/ontology#Citation'))
type: boolean
- Filter Operator
- predicate:
- expr: ((predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__INSTANCEOF_REL') and (object = 'http://ontos/OntosMiner/Common.English/ontology#Citation'))
- type: boolean
- Select Operator
- expressions:
- expr: subject
+ Select Operator
+ expressions:
+ expr: subject
+ type: string
+ outputColumnNames: _col0
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: _col0
+ type: string
+ tag: 0
+ value expressions:
+ expr: _col0
type: string
- outputColumnNames: _col0
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: _col0
- type: string
- tag: 0
- value expressions:
- expr: _col0
- type: string
t22:t2
TableScan
alias: t2
@@ -118,29 +100,25 @@ STAGE PLANS:
predicate:
expr: (predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL')
type: boolean
- Filter Operator
- predicate:
- expr: (predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL')
- type: boolean
- Select Operator
- expressions:
- expr: subject
+ Select Operator
+ expressions:
+ expr: subject
+ type: string
+ expr: object
+ type: string
+ outputColumnNames: _col0, _col1
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
type: string
- expr: object
+ sort order: +
+ Map-reduce partition columns:
+ expr: _col0
+ type: string
+ tag: 1
+ value expressions:
+ expr: _col1
type: string
- outputColumnNames: _col0, _col1
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: _col0
- type: string
- tag: 1
- value expressions:
- expr: _col1
- type: string
t33:t3
TableScan
alias: t3
@@ -148,29 +126,25 @@ STAGE PLANS:
predicate:
expr: (predicate = 'http://www.ontosearch.com/2007/12/ontosofa-ns#_from')
type: boolean
- Filter Operator
- predicate:
- expr: (predicate = 'http://www.ontosearch.com/2007/12/ontosofa-ns#_from')
- type: boolean
- Select Operator
- expressions:
- expr: subject
+ Select Operator
+ expressions:
+ expr: subject
+ type: string
+ expr: object
+ type: string
+ outputColumnNames: _col0, _col1
+ Reduce Output Operator
+ key expressions:
+ expr: _col1
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: _col1
type: string
- expr: object
+ tag: 2
+ value expressions:
+ expr: _col0
type: string
- outputColumnNames: _col0, _col1
- Reduce Output Operator
- key expressions:
- expr: _col1
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: _col1
- type: string
- tag: 2
- value expressions:
- expr: _col0
- type: string
Reduce Operator Tree:
Join Operator
condition map:
@@ -216,24 +190,20 @@ STAGE PLANS:
predicate:
expr: ((predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__INSTANCEOF_REL') and (object = 'http://ontos/OntosMiner/Common.English/ontology#Author'))
type: boolean
- Filter Operator
- predicate:
- expr: ((predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__INSTANCEOF_REL') and (object = 'http://ontos/OntosMiner/Common.English/ontology#Author'))
- type: boolean
- Select Operator
- expressions:
- expr: subject
+ Select Operator
+ expressions:
+ expr: subject
+ type: string
+ outputColumnNames: _col0
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
type: string
- outputColumnNames: _col0
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: _col0
- type: string
- tag: 1
+ sort order: +
+ Map-reduce partition columns:
+ expr: _col0
+ type: string
+ tag: 1
t55:t5
TableScan
alias: t5
@@ -241,29 +211,25 @@ STAGE PLANS:
predicate:
expr: (predicate = 'http://www.ontosearch.com/2007/12/ontosofa-ns#_to')
type: boolean
- Filter Operator
- predicate:
- expr: (predicate = 'http://www.ontosearch.com/2007/12/ontosofa-ns#_to')
- type: boolean
- Select Operator
- expressions:
- expr: subject
+ Select Operator
+ expressions:
+ expr: subject
+ type: string
+ expr: object
+ type: string
+ outputColumnNames: _col0, _col1
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: _col0
type: string
- expr: object
+ tag: 2
+ value expressions:
+ expr: _col1
type: string
- outputColumnNames: _col0, _col1
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: _col0
- type: string
- tag: 2
- value expressions:
- expr: _col1
- type: string
Reduce Operator Tree:
Join Operator
condition map:
@@ -311,29 +277,25 @@ STAGE PLANS:
predicate:
expr: (predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL')
type: boolean
- Filter Operator
- predicate:
- expr: (predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL')
- type: boolean
- Select Operator
- expressions:
- expr: subject
+ Select Operator
+ expressions:
+ expr: subject
+ type: string
+ expr: object
+ type: string
+ outputColumnNames: _col0, _col1
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: _col0
type: string
- expr: object
+ tag: 1
+ value expressions:
+ expr: _col1
type: string
- outputColumnNames: _col0, _col1
- Reduce Output Operator
- key expressions:
- expr: _col0
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: _col0
- type: string
- tag: 1
- value expressions:
- expr: _col1
- type: string
Reduce Operator Tree:
Join Operator
condition map:
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/join_filters/filter
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/join_filters/filter b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/join_filters/filter
index 262105a..9cd7f38 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/join_filters/filter
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/join_filters/filter
@@ -1 +1,2 @@
-sed -e 's#Copying file:.*/in..txt#Copying file:inX.txt#'
+sed -e 's#Copying file:.*/in..txt#Copying file:inX.txt#' \
+ -e '/.*jobconf.xml:an attempt to override final parameter: mapreduce.job.end-notification.*; Ignoring\./ d'
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/join_filters/out
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/join_filters/out b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/join_filters/out
index ce5b39b..fc08ff2 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/join_filters/out
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/join_filters/out
@@ -1,21 +1,7 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
CREATE TABLE myinput1(key int, value int)
LOAD DATA LOCAL INPATH 'seed_data_files/in3.txt' INTO TABLE myinput1
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster-clone/examples/hive/target/seed_data_files/in3.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/in3.txt
SELECT * FROM myinput1 a JOIN myinput1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value
@@ -276,16 +262,16 @@ CREATE TABLE smb_input1(key int, value int) CLUSTERED BY (key) SORTED BY (key) I
CREATE TABLE smb_input2(key int, value int) CLUSTERED BY (value) SORTED BY (value) INTO 2 BUCKETS
LOAD DATA LOCAL INPATH 'seed_data_files/in1.txt' into table smb_input1
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster-clone/examples/hive/target/seed_data_files/in1.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/in1.txt
LOAD DATA LOCAL INPATH 'seed_data_files/in2.txt' into table smb_input1
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster-clone/examples/hive/target/seed_data_files/in2.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/in2.txt
LOAD DATA LOCAL INPATH 'seed_data_files/in1.txt' into table smb_input2
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster-clone/examples/hive/target/seed_data_files/in1.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/in1.txt
LOAD DATA LOCAL INPATH 'seed_data_files/in2.txt' into table smb_input2
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster-clone/examples/hive/target/seed_data_files/in2.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/in2.txt
SET hive.optimize.bucketmapjoin = true
SET hive.optimize.bucketmapjoin.sortedmerge = true
SET hive.input.format = org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/load_dyn_part14/filter
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/load_dyn_part14/filter b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/load_dyn_part14/filter
index ee0080a..a855032 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/load_dyn_part14/filter
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/load_dyn_part14/filter
@@ -1,5 +1,7 @@
-sed -re 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' |
+sed -re 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' \
+ -e 's#file:/.*/-(ext|mr)-1000#file:/HADOOP/-\1-1000#' |
sed -e 's#owner:[^,]*,#owner:BORG,#' \
-e 's#createTime:[0-9]*,#createTime:JUSTNOW#' \
-e 's#location:hdfs://[^/]*/#location:hdfs://HADOOP/#' \
- -e 's#{transient_lastDdlTime=[0-9]*}#{transient_lastDdlTime=JUSTNOW}#'
+ -e 's#{transient_lastDdlTime=[0-9]*}#{transient_lastDdlTime=JUSTNOW}#' \
+ -e '/.*jobconf.xml:an attempt to override final parameter: mapreduce.job.end-notification.*; Ignoring\./ d'
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/load_dyn_part14/out
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/load_dyn_part14/out b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/load_dyn_part14/out
index 0b3a10e..72bfdb1 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/load_dyn_part14/out
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/load_dyn_part14/out
@@ -1,17 +1,3 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
create table if not exists nzhang_part14 (key string)
partitioned by (value string)
@@ -21,7 +7,7 @@ describe extended nzhang_part14
key string
value string
-Detailed Table Information Table(tableName:nzhang_part14, dbName:default, owner:hudson, createTime:1301677589, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/nzhang_part14, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:value, type:string, comment:null)], parameters:{transient_lastDdlTime=1301677589}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
+Detailed Table Information	Table(tableName:nzhang_part14, dbName:default, owner:root, createTime:1366741768, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/nzhang_part14, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{}), storedAsSubDirectories:false), partitionKeys:[FieldSchema(name:value, type:string, comment:null)], parameters:{transient_lastDdlTime=1366741768}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
set hive.exec.dynamic.partition=true
set hive.exec.dynamic.partition.mode=nonstrict
@@ -40,14 +26,16 @@ ABSTRACT SYNTAX TREE:
STAGE DEPENDENCIES:
Stage-1 is a root stage
- Stage-2 depends on stages: Stage-1, Stage-7, Stage-8
- Stage-6 depends on stages: Stage-2 , consists of Stage-5, Stage-4
+ Stage-2 depends on stages: Stage-1, Stage-9, Stage-10
+ Stage-8 depends on stages: Stage-2 , consists of Stage-5, Stage-4, Stage-6
Stage-5
- Stage-0 depends on stages: Stage-5, Stage-4
+ Stage-0 depends on stages: Stage-5, Stage-4, Stage-7
Stage-3 depends on stages: Stage-0
Stage-4
- Stage-7 is a root stage
- Stage-8 is a root stage
+ Stage-6
+ Stage-7 depends on stages: Stage-6
+ Stage-9 is a root stage
+ Stage-10 is a root stage
STAGE PLANS:
Stage: Stage-1
@@ -85,66 +73,69 @@ STAGE PLANS:
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-06-30_831_1767612874675998584/-mr-10002
- Union
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.nzhang_part14
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-06-30_831_1767612874675998584/-mr-10004
- Union
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.nzhang_part14
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-06-30_831_1767612874675998584/-mr-10005
- Union
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.nzhang_part14
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-29-30_022_7780088645980753198/-mr-10002
+ TableScan
+ Union
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.nzhang_part14
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-29-30_022_7780088645980753198/-mr-10004
+ TableScan
+ Union
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.nzhang_part14
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-29-30_022_7780088645980753198/-mr-10005
+ TableScan
+ Union
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.nzhang_part14
- Stage: Stage-6
+ Stage: Stage-8
Conditional Operator
Stage: Stage-5
Move Operator
files:
hdfs directory: true
- destination: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-06-30_831_1767612874675998584/-ext-10000
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-29-30_022_7780088645980753198/-ext-10000
Stage: Stage-0
Move Operator
@@ -164,7 +155,20 @@ STAGE PLANS:
Stage: Stage-4
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-06-30_831_1767612874675998584/-ext-10003
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-29-30_022_7780088645980753198/-ext-10003
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.nzhang_part14
+
+ Stage: Stage-6
+ Map Reduce
+ Alias -> Map Operator Tree:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-29-30_022_7780088645980753198/-ext-10003
File Output Operator
compressed: false
GlobalTableId: 0
@@ -175,6 +179,12 @@ STAGE PLANS:
name: default.nzhang_part14
Stage: Stage-7
+ Move Operator
+ files:
+ hdfs directory: true
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-29-30_022_7780088645980753198/-ext-10000
+
+ Stage: Stage-9
Map Reduce
Alias -> Map Operator Tree:
null-subquery2:t-subquery2:src
@@ -206,7 +216,7 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- Stage: Stage-8
+ Stage: Stage-10
Map Reduce
Alias -> Map Operator Tree:
null-subquery1-subquery1:t-subquery1-subquery1:src
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/merge_dynamic_partition/filter
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/merge_dynamic_partition/filter b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/merge_dynamic_partition/filter
index 6eaaaef..45a1fc5 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/merge_dynamic_partition/filter
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/merge_dynamic_partition/filter
@@ -2,4 +2,6 @@ sed -re 's#Copying file:.*/srcbucket#Copying file:srcbucket#' \
-e 's#^owner:.*$#owner:BORG#' \
-e 's#hdfs://[^/]*/#hdfs://HADOOP/#' \
-e 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' \
- -e 's#last(Access|Update)Time:[0-9]*#last\1Time:JUSTNOW#'
+ -e 's#last(Access|Update)Time:[0-9]*#last\1Time:JUSTNOW#' \
+ -e 's#file:/.*/-(ext|mr)-1000#file:/HADOOP/-\1-1000#' \
+ -e '/.*jobconf.xml:an attempt to override final parameter: mapreduce.job.end-notification.*; Ignoring\./ d'
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/merge_dynamic_partition/out
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/merge_dynamic_partition/out b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/merge_dynamic_partition/out
index 2056646..d5402b8 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/merge_dynamic_partition/out
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/merge_dynamic_partition/out
@@ -1,17 +1,3 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
set hive.exec.dynamic.partition=true
set hive.exec.dynamic.partition.mode=nonstrict
@@ -23,16 +9,16 @@ create table merge_dynamic_part like srcpart
load data local inpath 'seed_data_files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster-clone/examples/hive/target/seed_data_files/srcbucket20.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/srcbucket20.txt
load data local inpath 'seed_data_files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster-clone/examples/hive/target/seed_data_files/srcbucket21.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/srcbucket21.txt
load data local inpath 'seed_data_files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster-clone/examples/hive/target/seed_data_files/srcbucket22.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/srcbucket22.txt
load data local inpath 'seed_data_files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster-clone/examples/hive/target/seed_data_files/srcbucket23.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/srcbucket23.txt
set hive.input.format=org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat
set hive.merge.mapfiles=false
set hive.merge.mapredfiles=false
@@ -55,27 +41,23 @@ STAGE PLANS:
srcpart_merge_dp
TableScan
alias: srcpart_merge_dp
- Filter Operator
- predicate:
- expr: (ds = '2008-04-08')
- type: boolean
- Select Operator
- expressions:
- expr: key
- type: string
- expr: value
- type: string
- expr: hr
- type: string
- outputColumnNames: _col0, _col1, _col2
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.merge_dynamic_part
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ expr: hr
+ type: string
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.merge_dynamic_part
Stage: Stage-0
Move Operator
@@ -602,8 +584,8 @@ select * from merge_dynamic_part order by key, value
show table extended like `merge_dynamic_part`
tableName:merge_dynamic_part
-owner:hudson
-location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/merge_dynamic_part
+owner:root
+location:hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/merge_dynamic_part
inputformat:org.apache.hadoop.mapred.TextInputFormat
outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
columns:struct columns { string key, string value}
@@ -613,8 +595,8 @@ totalNumberFiles:4
totalFileSize:5812
maxFileSize:1612
minFileSize:1358
-lastAccessTime:1301677489601
-lastUpdateTime:1301677491944
+lastAccessTime:1366742356513
+lastUpdateTime:1366742358466
set hive.input.format=org.apache.hadoop.hive.ql.io.CombineHiveInputFormat
set hive.merge.mapfiles=true
@@ -628,11 +610,13 @@ ABSTRACT SYNTAX TREE:
STAGE DEPENDENCIES:
Stage-1 is a root stage
- Stage-5 depends on stages: Stage-1 , consists of Stage-4, Stage-3
+ Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
Stage-4
- Stage-0 depends on stages: Stage-4, Stage-3
+ Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
Stage-2 depends on stages: Stage-0
Stage-3
+ Stage-5
+ Stage-6 depends on stages: Stage-5
STAGE PLANS:
Stage: Stage-1
@@ -641,34 +625,30 @@ STAGE PLANS:
srcpart_merge_dp
TableScan
alias: srcpart_merge_dp
- Filter Operator
- predicate:
- expr: (ds = '2008-04-08')
- type: boolean
- Select Operator
- expressions:
- expr: key
- type: string
- expr: value
- type: string
- outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.merge_dynamic_part
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.merge_dynamic_part
- Stage: Stage-5
+ Stage: Stage-7
Conditional Operator
Stage: Stage-4
Move Operator
files:
hdfs directory: true
- destination: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-05-06_041_3360829702411135093/-ext-10000
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-39-51_354_5074906116987257301/-ext-10000
Stage: Stage-0
Move Operator
@@ -689,7 +669,7 @@ STAGE PLANS:
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-05-06_041_3360829702411135093/-ext-10002
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-39-51_354_5074906116987257301/-ext-10002
File Output Operator
compressed: false
GlobalTableId: 0
@@ -699,10 +679,29 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.merge_dynamic_part
+ Stage: Stage-5
+ Map Reduce
+ Alias -> Map Operator Tree:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-39-51_354_5074906116987257301/-ext-10002
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.merge_dynamic_part
+
+ Stage: Stage-6
+ Move Operator
+ files:
+ hdfs directory: true
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-39-51_354_5074906116987257301/-ext-10000
+
insert overwrite table merge_dynamic_part partition (ds='2008-04-08', hr=11) select key, value from srcpart_merge_dp where ds='2008-04-08'
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/merge_dynamic_part/ds=2008-04-08/hr=11
+Deleted hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/merge_dynamic_part/ds=2008-04-08/hr=11
select * from merge_dynamic_part order by key, value
@@ -1209,8 +1208,8 @@ select * from merge_dynamic_part order by key, value
show table extended like `merge_dynamic_part`
tableName:merge_dynamic_part
-owner:hudson
-location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/merge_dynamic_part
+owner:root
+location:hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/merge_dynamic_part
inputformat:org.apache.hadoop.mapred.TextInputFormat
outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
columns:struct columns { string key, string value}
@@ -1220,8 +1219,8 @@ totalNumberFiles:1
totalFileSize:5812
maxFileSize:5812
minFileSize:5812
-lastAccessTime:1301677509720
-lastUpdateTime:1301677509720
+lastAccessTime:1366742405322
+lastUpdateTime:1366742405493
set hive.input.format=org.apache.hadoop.hive.ql.io.CombineHiveInputFormat
set hive.merge.mapfiles=true
@@ -1235,11 +1234,13 @@ ABSTRACT SYNTAX TREE:
STAGE DEPENDENCIES:
Stage-1 is a root stage
- Stage-5 depends on stages: Stage-1 , consists of Stage-4, Stage-3
+ Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
Stage-4
- Stage-0 depends on stages: Stage-4, Stage-3
+ Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
Stage-2 depends on stages: Stage-0
Stage-3
+ Stage-5
+ Stage-6 depends on stages: Stage-5
STAGE PLANS:
Stage: Stage-1
@@ -1248,38 +1249,34 @@ STAGE PLANS:
srcpart_merge_dp
TableScan
alias: srcpart_merge_dp
- Filter Operator
- predicate:
- expr: ((ds = '2008-04-08') and (hr = 11))
- type: boolean
- Select Operator
- expressions:
- expr: key
- type: string
- expr: value
- type: string
- expr: ds
- type: string
- expr: hr
- type: string
- outputColumnNames: _col0, _col1, _col2, _col3
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.merge_dynamic_part
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ expr: ds
+ type: string
+ expr: hr
+ type: string
+ outputColumnNames: _col0, _col1, _col2, _col3
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.merge_dynamic_part
- Stage: Stage-5
+ Stage: Stage-7
Conditional Operator
Stage: Stage-4
Move Operator
files:
hdfs directory: true
- destination: hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-05-26_242_8654647266048835937/-ext-10000
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-40-31_143_7299434649267988331/-ext-10000
Stage: Stage-0
Move Operator
@@ -1300,7 +1297,7 @@ STAGE PLANS:
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-05-26_242_8654647266048835937/-ext-10002
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-40-31_143_7299434649267988331/-ext-10002
File Output Operator
compressed: false
GlobalTableId: 0
@@ -1310,10 +1307,29 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.merge_dynamic_part
+ Stage: Stage-5
+ Map Reduce
+ Alias -> Map Operator Tree:
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-40-31_143_7299434649267988331/-ext-10002
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.merge_dynamic_part
+
+ Stage: Stage-6
+ Move Operator
+ files:
+ hdfs directory: true
+ destination: hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-40-31_143_7299434649267988331/-ext-10000
+
insert overwrite table merge_dynamic_part partition (ds, hr) select key, value, ds, hr from srcpart_merge_dp where ds='2008-04-08' and hr=11
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/merge_dynamic_part/ds=2008-04-08/hr=11
+Deleted hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/merge_dynamic_part/ds=2008-04-08/hr=11
select * from merge_dynamic_part order by key, value
@@ -1820,8 +1836,8 @@ select * from merge_dynamic_part order by key, value
show table extended like `merge_dynamic_part`
tableName:merge_dynamic_part
-owner:hudson
-location:hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/merge_dynamic_part
+owner:root
+location:hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/user/hive/warehouse/merge_dynamic_part
inputformat:org.apache.hadoop.mapred.TextInputFormat
outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
columns:struct columns { string key, string value}
@@ -1831,6 +1847,6 @@ totalNumberFiles:1
totalFileSize:5812
maxFileSize:5812
minFileSize:5812
-lastAccessTime:1301677529792
-lastUpdateTime:1301677529792
+lastAccessTime:1366742447479
+lastUpdateTime:1366742447704
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/multi_insert/filter
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/multi_insert/filter b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/multi_insert/filter
index 2779eff..f26b805 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/multi_insert/filter
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/multi_insert/filter
@@ -1,3 +1,6 @@
sed -re 's#hdfs://[^/]*/#hdfs://HADOOP/#' \
-e 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' \
- -e 's#Deleted hdfs://.*src_multi.$#Deleted hdfs://src_multi#'
+ -e 's#Deleted .*/.*src_multi.$#Deleted /src_multi#' \
+ -e 's#file:/.*/-(ext|mr)-1000#file:/HADOOP/-\1-1000#' \
+ -e '/.*jobconf.xml:an attempt to override final parameter: mapreduce.job.end-notification.*; Ignoring\./ d'
+
[5/5] git commit: BIGTOP-885. TestHiveSmokeBulk fails on Hive 0.9
Posted by co...@apache.org.
BIGTOP-885. TestHiveSmokeBulk fails on Hive 0.9
Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/8d32a92d
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/8d32a92d
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/8d32a92d
Branch: refs/heads/master
Commit: 8d32a92d04a4aeca41b56dc67f5bf4832eec392b
Parents: 7b2f77d
Author: Konstantin Boudnik <co...@apache.org>
Authored: Wed Apr 24 13:29:54 2013 -0700
Committer: Konstantin Boudnik <co...@apache.org>
Committed: Wed Apr 24 13:29:54 2013 -0700
----------------------------------------------------------------------
.../resources/scripts/ql/authorization_2/filter | 2 -
.../main/resources/scripts/ql/authorization_2/in | 123 --
.../main/resources/scripts/ql/authorization_2/out | 59 -
.../main/resources/scripts/ql/auto_join20/filter | 4 +-
.../src/main/resources/scripts/ql/auto_join20/out | 632 +++-----
.../src/main/resources/scripts/ql/basic/filter | 3 +-
.../hive/src/main/resources/scripts/ql/basic/out | 18 +-
.../scripts/ql/bucketizedhiveinputformat/filter | 4 +-
.../scripts/ql/bucketizedhiveinputformat/out | 24 +-
.../resources/scripts/ql/bucketmapjoin5/filter | 4 +-
.../main/resources/scripts/ql/bucketmapjoin5/out | 482 ++++--
.../resources/scripts/ql/drop_multi_partitions/out | 16 +-
.../ql/groupby_map_ppr_multi_distinct/filter | 4 +-
.../scripts/ql/groupby_map_ppr_multi_distinct/out | 164 +-
.../resources/scripts/ql/index_creation/filter | 4 +-
.../main/resources/scripts/ql/index_creation/out | 46 +-
.../src/main/resources/scripts/ql/join19/filter | 1 +
.../hive/src/main/resources/scripts/ql/join19/out | 232 ++--
.../main/resources/scripts/ql/join_filters/filter | 3 +-
.../src/main/resources/scripts/ql/join_filters/out | 24 +-
.../resources/scripts/ql/load_dyn_part14/filter | 6 +-
.../main/resources/scripts/ql/load_dyn_part14/out | 160 +-
.../scripts/ql/merge_dynamic_partition/filter | 4 +-
.../scripts/ql/merge_dynamic_partition/out | 226 ++--
.../main/resources/scripts/ql/multi_insert/filter | 5 +-
.../src/main/resources/scripts/ql/multi_insert/out | 1252 ++++++++-------
.../resources/scripts/ql/rcfile_columnar/filter | 3 +-
.../main/resources/scripts/ql/rcfile_columnar/out | 20 +-
.../src/main/resources/scripts/ql/stats8/filter | 6 -
.../hive/src/main/resources/scripts/ql/stats8/in | 47 -
.../hive/src/main/resources/scripts/ql/stats8/out | 224 ---
.../src/main/resources/scripts/ql/union3/filter | 4 +-
.../hive/src/main/resources/scripts/ql/union3/out | 128 +-
.../main/resources/scripts/ql/uniquejoin/filter | 3 +-
.../src/main/resources/scripts/ql/uniquejoin/out | 20 +-
35 files changed, 1722 insertions(+), 2235 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/authorization_2/filter
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/authorization_2/filter b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/authorization_2/filter
deleted file mode 100644
index 015b318..0000000
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/authorization_2/filter
+++ /dev/null
@@ -1,2 +0,0 @@
-sed -e 's#^grantTime.[0-9]*#grantTime\tJUSTNOW#' \
- -e 's#^grantor.*$#grantor\t\tBORG#'
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/authorization_2/in
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/authorization_2/in b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/authorization_2/in
deleted file mode 100644
index 015fe80..0000000
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/authorization_2/in
+++ /dev/null
@@ -1,123 +0,0 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
-create table authorization_part (key int, value string) partitioned by (ds string);
-
-ALTER TABLE authorization_part SET TBLPROPERTIES ("PARTITION_LEVEL_PRIVILEGE"="TRUE");
-set hive.security.authorization.enabled=true;
-
--- column grant to user
-grant Create on table authorization_part to user hive_test_user;
-grant Update on table authorization_part to user hive_test_user;
-grant Drop on table authorization_part to user hive_test_user;
-grant select on table src to user hive_test_user;
-
-show grant user hive_test_user on table authorization_part;
-
-alter table authorization_part add partition (ds='2010');
-show grant user hive_test_user on table authorization_part partition (ds='2010');
-
-grant select(key) on table authorization_part to user hive_test_user;
-alter table authorization_part drop partition (ds='2010');
-insert overwrite table authorization_part partition (ds='2010') select key, value from src;
-show grant user hive_test_user on table authorization_part(key) partition (ds='2010');
-show grant user hive_test_user on table authorization_part(key);
-select key from authorization_part where ds='2010' order by key limit 20;
-
-revoke select(key) on table authorization_part from user hive_test_user;
-show grant user hive_test_user on table authorization_part(key);
-show grant user hive_test_user on table authorization_part(key) partition (ds='2010');
-
-select key from authorization_part where ds='2010' order by key limit 20;
-
-revoke select(key) on table authorization_part partition (ds='2010') from user hive_test_user;
-show grant user hive_test_user on table authorization_part(key) partition (ds='2010');
-
-alter table authorization_part drop partition (ds='2010');
-
--- table grant to user
-show grant user hive_test_user on table authorization_part;
-
-alter table authorization_part add partition (ds='2010');
-show grant user hive_test_user on table authorization_part partition (ds='2010');
-
-grant select on table authorization_part to user hive_test_user;
-alter table authorization_part drop partition (ds='2010');
-insert overwrite table authorization_part partition (ds='2010') select key, value from src;
-show grant user hive_test_user on table authorization_part partition (ds='2010');
-show grant user hive_test_user on table authorization_part;
-select key from authorization_part where ds='2010' order by key limit 20;
-
-revoke select on table authorization_part from user hive_test_user;
-show grant user hive_test_user on table authorization_part;
-show grant user hive_test_user on table authorization_part partition (ds='2010');
-
-select key from authorization_part where ds='2010' order by key limit 20;
-
-revoke select on table authorization_part partition (ds='2010') from user hive_test_user;
-show grant user hive_test_user on table authorization_part partition (ds='2010');
-
-alter table authorization_part drop partition (ds='2010');
-
--- column grant to group
-
-show grant group hive_test_group1 on table authorization_part;
-
-alter table authorization_part add partition (ds='2010');
-show grant group hive_test_group1 on table authorization_part partition (ds='2010');
-
-grant select(key) on table authorization_part to group hive_test_group1;
-alter table authorization_part drop partition (ds='2010');
-insert overwrite table authorization_part partition (ds='2010') select key, value from src;
-show grant group hive_test_group1 on table authorization_part(key) partition (ds='2010');
-show grant group hive_test_group1 on table authorization_part(key);
-select key from authorization_part where ds='2010' order by key limit 20;
-
-revoke select(key) on table authorization_part from group hive_test_group1;
-show grant group hive_test_group1 on table authorization_part(key);
-show grant group hive_test_group1 on table authorization_part(key) partition (ds='2010');
-
-select key from authorization_part where ds='2010' order by key limit 20;
-
-revoke select(key) on table authorization_part partition (ds='2010') from group hive_test_group1;
-show grant group hive_test_group1 on table authorization_part(key) partition (ds='2010');
-
-alter table authorization_part drop partition (ds='2010');
-
--- table grant to group
-show grant group hive_test_group1 on table authorization_part;
-
-alter table authorization_part add partition (ds='2010');
-show grant group hive_test_group1 on table authorization_part partition (ds='2010');
-
-grant select on table authorization_part to group hive_test_group1;
-alter table authorization_part drop partition (ds='2010');
-insert overwrite table authorization_part partition (ds='2010') select key, value from src;
-show grant group hive_test_group1 on table authorization_part partition (ds='2010');
-show grant group hive_test_group1 on table authorization_part;
-select key from authorization_part where ds='2010' order by key limit 20;
-
-revoke select on table authorization_part from group hive_test_group1;
-show grant group hive_test_group1 on table authorization_part;
-show grant group hive_test_group1 on table authorization_part partition (ds='2010');
-
-select key from authorization_part where ds='2010' order by key limit 20;
-
-revoke select on table authorization_part partition (ds='2010') from group hive_test_group1;
-show grant group hive_test_group1 on table authorization_part partition (ds='2010');
-
-
-revoke select on table src from user hive_test_user;
-set hive.security.authorization.enabled=false;
-drop table authorization_part;
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/authorization_2/out
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/authorization_2/out b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/authorization_2/out
deleted file mode 100644
index acae64c..0000000
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/authorization_2/out
+++ /dev/null
@@ -1,59 +0,0 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
-create table authorization_part (key int, value string) partitioned by (ds string)
-
-
-ALTER TABLE authorization_part SET TBLPROPERTIES ("PARTITION_LEVEL_PRIVILEGE"="TRUE")
-set hive.security.authorization.enabled=true
-
-
--- column grant to user
-grant Create on table authorization_part to user hive_test_user
-
-grant Update on table authorization_part to user hive_test_user
-
-grant Drop on table authorization_part to user hive_test_user
-
-grant select on table src to user hive_test_user
-
-
-show grant user hive_test_user on table authorization_part
-
-database default
-table authorization_part
-principalName hive_test_user
-principalType USER
-privilege Create
-grantTime 1301676873
-grantor hudson
-
-database default
-table authorization_part
-principalName hive_test_user
-principalType USER
-privilege Update
-grantTime 1301676874
-grantor hudson
-
-database default
-table authorization_part
-principalName hive_test_user
-principalType USER
-privilege Drop
-grantTime 1301676874
-grantor hudson
-
-
-alter table authorization_part add partition (ds='2010')
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/auto_join20/filter
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/auto_join20/filter b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/auto_join20/filter
index e67bc0d..9cdeb19 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/auto_join20/filter
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/auto_join20/filter
@@ -1 +1,3 @@
-sed -re 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#'
+sed -re 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' \
+ -e 's#file:/.*/-(ext|mr)-1000#file:/HADOOP/-\1-1000#' \
+ -e '/.*jobconf.xml:an attempt to override final parameter: mapreduce.job.end-notification.*; Ignoring\./ d'
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/auto_join20/out
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/auto_join20/out b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/auto_join20/out
index b58f461..092f64d 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/auto_join20/out
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/auto_join20/out
@@ -1,17 +1,3 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
set hive.auto.convert.join = true
@@ -26,27 +12,25 @@ ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_JOIN (TOK_TABREF (TOK_TABNAME src) src1) (TOK_TABREF (TOK_TABNAME src) src2) (AND (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src2) key)) (< (. (TOK_TABLE_OR_COL src1) key) 10))) (TOK_TABREF (TOK_TABNAME src) src3) (AND (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src3) key)) (< (. (TOK_TABLE_OR_COL src3) key) 20)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key) k1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) value) v1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) key) k2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value) v2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src3) key) k3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src3) value) v3)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL k1)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL v1)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL k2)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL v2)) (TOK_TABSO
RTCOLNAMEASC (TOK_TABLE_OR_COL k3)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL v3))))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION hash (. (TOK_TABLE_OR_COL a) k1) (. (TOK_TABLE_OR_COL a) v1) (. (TOK_TABLE_OR_COL a) k2) (. (TOK_TABLE_OR_COL a) v2) (. (TOK_TABLE_OR_COL a) k3) (. (TOK_TABLE_OR_COL a) v3)))))))
STAGE DEPENDENCIES:
- Stage-8 is a root stage , consists of Stage-9, Stage-10, Stage-1
- Stage-9 has a backup stage: Stage-1
- Stage-6 depends on stages: Stage-9
- Stage-2 depends on stages: Stage-1, Stage-6, Stage-7
+ Stage-7 is a root stage , consists of Stage-8, Stage-1
+ Stage-8 has a backup stage: Stage-1
+ Stage-6 depends on stages: Stage-8
+ Stage-2 depends on stages: Stage-1, Stage-6
Stage-3 depends on stages: Stage-2
- Stage-10 has a backup stage: Stage-1
- Stage-7 depends on stages: Stage-10
Stage-1
Stage-0 is a root stage
STAGE PLANS:
- Stage: Stage-8
+ Stage: Stage-7
Conditional Operator
- Stage: Stage-9
+ Stage: Stage-8
Map Reduce Local Work
Alias -> Map Local Tables:
a:src1
Fetch Operator
limit: -1
- a:src3
+ a:src2
Fetch Operator
limit: -1
Alias -> Map Local Operator Tree:
@@ -55,52 +39,52 @@ STAGE PLANS:
alias: src1
Filter Operator
predicate:
- expr: (key < 10)
+ expr: (key < 10.0)
type: boolean
- Filter Operator
- predicate:
- expr: (key < 10)
- type: boolean
- HashTable Sink Operator
- condition expressions:
- 0 {key} {value}
- 1 {key} {value}
- 2 {key} {value}
- filter predicates:
- 0
- 1
- 2 {(key < 20)}
- handleSkewJoin: false
- keys:
- 0 [Column[key]]
- 1 [Column[key]]
- 2 [Column[key]]
- Position of Big Table: 1
- a:src3
+ HashTable Sink Operator
+ condition expressions:
+ 0 {key} {value}
+ 1 {key} {value}
+ 2 {key} {value}
+ filter predicates:
+ 0
+ 1
+ 2 {(key < 20.0)}
+ handleSkewJoin: false
+ keys:
+ 0 [Column[key]]
+ 1 [Column[key]]
+ 2 [Column[key]]
+ Position of Big Table: 2
+ a:src2
TableScan
- alias: src3
- HashTable Sink Operator
- condition expressions:
- 0 {key} {value}
- 1 {key} {value}
- 2 {key} {value}
- filter predicates:
- 0
- 1
- 2 {(key < 20)}
- handleSkewJoin: false
- keys:
- 0 [Column[key]]
- 1 [Column[key]]
- 2 [Column[key]]
- Position of Big Table: 1
+ alias: src2
+ Filter Operator
+ predicate:
+ expr: (key < 10.0)
+ type: boolean
+ HashTable Sink Operator
+ condition expressions:
+ 0 {key} {value}
+ 1 {key} {value}
+ 2 {key} {value}
+ filter predicates:
+ 0
+ 1
+ 2 {(key < 20.0)}
+ handleSkewJoin: false
+ keys:
+ 0 [Column[key]]
+ 1 [Column[key]]
+ 2 [Column[key]]
+ Position of Big Table: 2
Stage: Stage-6
Map Reduce
Alias -> Map Operator Tree:
- a:src2
+ a:src3
TableScan
- alias: src2
+ alias: src3
Map Join Operator
condition map:
Inner Join 0 to 1
@@ -112,14 +96,14 @@ STAGE PLANS:
filter predicates:
0
1
- 2 {(key < 20)}
+ 2 {(key < 20.0)}
handleSkewJoin: false
keys:
0 [Column[key]]
1 [Column[key]]
2 [Column[key]]
outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9
- Position of Big Table: 1
+ Position of Big Table: 2
Select Operator
expressions:
expr: _col0
@@ -147,7 +131,7 @@ STAGE PLANS:
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-21-00_772_4864094195694055998/-mr-10002
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-35-58_309_5301659382942807999/-mr-10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -210,7 +194,7 @@ STAGE PLANS:
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-21-00_772_4864094195694055998/-mr-10003
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-35-58_309_5301659382942807999/-mr-10003
Reduce Output Operator
sort order:
tag: -1
@@ -236,155 +220,51 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- Stage: Stage-10
- Map Reduce Local Work
- Alias -> Map Local Tables:
- a:src1
- Fetch Operator
- limit: -1
- a:src2
- Fetch Operator
- limit: -1
- Alias -> Map Local Operator Tree:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
a:src1
TableScan
alias: src1
Filter Operator
predicate:
- expr: (key < 10)
+ expr: (key < 10.0)
type: boolean
- Filter Operator
- predicate:
- expr: (key < 10)
- type: boolean
- HashTable Sink Operator
- condition expressions:
- 0 {key} {value}
- 1 {key} {value}
- 2 {key} {value}
- filter predicates:
- 0
- 1
- 2 {(key < 20)}
- handleSkewJoin: false
- keys:
- 0 [Column[key]]
- 1 [Column[key]]
- 2 [Column[key]]
- Position of Big Table: 2
- a:src2
- TableScan
- alias: src2
- HashTable Sink Operator
- condition expressions:
- 0 {key} {value}
- 1 {key} {value}
- 2 {key} {value}
- filter predicates:
- 0
- 1
- 2 {(key < 20)}
- handleSkewJoin: false
- keys:
- 0 [Column[key]]
- 1 [Column[key]]
- 2 [Column[key]]
- Position of Big Table: 2
-
- Stage: Stage-7
- Map Reduce
- Alias -> Map Operator Tree:
- a:src3
- TableScan
- alias: src3
- Map Join Operator
- condition map:
- Inner Join 0 to 1
- Right Outer Join0 to 2
- condition expressions:
- 0 {key} {value}
- 1 {key} {value}
- 2 {key} {value}
- filter predicates:
- 0
- 1
- 2 {(key < 20)}
- handleSkewJoin: false
- keys:
- 0 [Column[key]]
- 1 [Column[key]]
- 2 [Column[key]]
- outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9
- Position of Big Table: 2
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- expr: _col4
+ Reduce Output Operator
+ key expressions:
+ expr: key
type: string
- expr: _col5
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
type: string
- expr: _col8
+ tag: 0
+ value expressions:
+ expr: key
type: string
- expr: _col9
+ expr: value
type: string
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- Local Work:
- Map Reduce Local Work
-
- Stage: Stage-1
- Map Reduce
- Alias -> Map Operator Tree:
- a:src1
+ a:src2
TableScan
- alias: src1
+ alias: src2
Filter Operator
predicate:
- expr: (key < 10)
+ expr: (key < 10.0)
type: boolean
- Filter Operator
- predicate:
- expr: (key < 10)
- type: boolean
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 0
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
- a:src2
- TableScan
- alias: src2
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 1
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 1
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
a:src3
TableScan
alias: src3
@@ -414,7 +294,7 @@ STAGE PLANS:
filter predicates:
0
1
- 2 {(VALUE._col0 < 20)}
+ 2 {(VALUE._col0 < 20.0)}
handleSkewJoin: false
outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9
Select Operator
@@ -466,27 +346,25 @@ ABSTRACT SYNTAX TREE:
(TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_JOIN (TOK_TABREF (TOK_TABNAME src) src1) (TOK_TABREF (TOK_TABNAME src) src2) (AND (AND (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src2) key)) (< (. (TOK_TABLE_OR_COL src1) key) 10)) (< (. (TOK_TABLE_OR_COL src2) key) 15))) (TOK_TABREF (TOK_TABNAME src) src3) (AND (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src3) key)) (< (. (TOK_TABLE_OR_COL src3) key) 20)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key) k1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) value) v1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) key) k2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value) v2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src3) key) k3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src3) value) v3)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL k1)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL v1)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL k2)) (TOK_TABSOR
TCOLNAMEASC (TOK_TABLE_OR_COL v2)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL k3)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL v3))))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION hash (. (TOK_TABLE_OR_COL a) k1) (. (TOK_TABLE_OR_COL a) v1) (. (TOK_TABLE_OR_COL a) k2) (. (TOK_TABLE_OR_COL a) v2) (. (TOK_TABLE_OR_COL a) k3) (. (TOK_TABLE_OR_COL a) v3)))))))
STAGE DEPENDENCIES:
- Stage-8 is a root stage , consists of Stage-9, Stage-10, Stage-1
- Stage-9 has a backup stage: Stage-1
- Stage-6 depends on stages: Stage-9
- Stage-2 depends on stages: Stage-1, Stage-6, Stage-7
+ Stage-7 is a root stage , consists of Stage-8, Stage-1
+ Stage-8 has a backup stage: Stage-1
+ Stage-6 depends on stages: Stage-8
+ Stage-2 depends on stages: Stage-1, Stage-6
Stage-3 depends on stages: Stage-2
- Stage-10 has a backup stage: Stage-1
- Stage-7 depends on stages: Stage-10
Stage-1
Stage-0 is a root stage
STAGE PLANS:
- Stage: Stage-8
+ Stage: Stage-7
Conditional Operator
- Stage: Stage-9
+ Stage: Stage-8
Map Reduce Local Work
Alias -> Map Local Tables:
a:src1
Fetch Operator
limit: -1
- a:src3
+ a:src2
Fetch Operator
limit: -1
Alias -> Map Local Operator Tree:
@@ -495,31 +373,56 @@ STAGE PLANS:
alias: src1
Filter Operator
predicate:
- expr: (key < 10)
+ expr: ((key < 10.0) and (key < 15.0))
+ type: boolean
+ HashTable Sink Operator
+ condition expressions:
+ 0 {key} {value}
+ 1 {key} {value}
+ 2 {key} {value}
+ filter predicates:
+ 0
+ 1
+ 2 {(key < 20.0)}
+ handleSkewJoin: false
+ keys:
+ 0 [Column[key]]
+ 1 [Column[key]]
+ 2 [Column[key]]
+ Position of Big Table: 2
+ a:src2
+ TableScan
+ alias: src2
+ Filter Operator
+ predicate:
+ expr: ((key < 15.0) and (key < 10.0))
type: boolean
- Filter Operator
- predicate:
- expr: (key < 10)
- type: boolean
- HashTable Sink Operator
- condition expressions:
- 0 {key} {value}
- 1 {key} {value}
- 2 {key} {value}
- filter predicates:
- 0
- 1
- 2 {(key < 20)}
- handleSkewJoin: false
- keys:
- 0 [Column[key]]
- 1 [Column[key]]
- 2 [Column[key]]
- Position of Big Table: 1
+ HashTable Sink Operator
+ condition expressions:
+ 0 {key} {value}
+ 1 {key} {value}
+ 2 {key} {value}
+ filter predicates:
+ 0
+ 1
+ 2 {(key < 20.0)}
+ handleSkewJoin: false
+ keys:
+ 0 [Column[key]]
+ 1 [Column[key]]
+ 2 [Column[key]]
+ Position of Big Table: 2
+
+ Stage: Stage-6
+ Map Reduce
+ Alias -> Map Operator Tree:
a:src3
TableScan
alias: src3
- HashTable Sink Operator
+ Map Join Operator
+ condition map:
+ Inner Join 0 to 1
+ Right Outer Join0 to 2
condition expressions:
0 {key} {value}
1 {key} {value}
@@ -527,75 +430,42 @@ STAGE PLANS:
filter predicates:
0
1
- 2 {(key < 20)}
+ 2 {(key < 20.0)}
handleSkewJoin: false
keys:
0 [Column[key]]
1 [Column[key]]
2 [Column[key]]
- Position of Big Table: 1
-
- Stage: Stage-6
- Map Reduce
- Alias -> Map Operator Tree:
- a:src2
- TableScan
- alias: src2
- Filter Operator
- predicate:
- expr: (key < 15)
- type: boolean
- Filter Operator
- predicate:
- expr: (key < 15)
- type: boolean
- Map Join Operator
- condition map:
- Inner Join 0 to 1
- Right Outer Join0 to 2
- condition expressions:
- 0 {key} {value}
- 1 {key} {value}
- 2 {key} {value}
- filter predicates:
- 0
- 1
- 2 {(key < 20)}
- handleSkewJoin: false
- keys:
- 0 [Column[key]]
- 1 [Column[key]]
- 2 [Column[key]]
- outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9
- Position of Big Table: 1
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
- type: string
- expr: _col4
- type: string
- expr: _col5
- type: string
- expr: _col8
- type: string
- expr: _col9
- type: string
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9
+ Position of Big Table: 2
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col4
+ type: string
+ expr: _col5
+ type: string
+ expr: _col8
+ type: string
+ expr: _col9
+ type: string
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
Local Work:
Map Reduce Local Work
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-21-48_735_5280603062175628505/-mr-10002
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-37-20_822_7360885266220174309/-mr-10002
Reduce Output Operator
key expressions:
expr: _col0
@@ -658,7 +528,7 @@ STAGE PLANS:
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_10-21-48_735_5280603062175628505/-mr-10003
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-37-20_822_7360885266220174309/-mr-10003
Reduce Output Operator
sort order:
tag: -1
@@ -684,171 +554,51 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- Stage: Stage-10
- Map Reduce Local Work
- Alias -> Map Local Tables:
- a:src1
- Fetch Operator
- limit: -1
- a:src2
- Fetch Operator
- limit: -1
- Alias -> Map Local Operator Tree:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
a:src1
TableScan
alias: src1
Filter Operator
predicate:
- expr: (key < 10)
+ expr: ((key < 10.0) and (key < 15.0))
type: boolean
- Filter Operator
- predicate:
- expr: (key < 10)
- type: boolean
- HashTable Sink Operator
- condition expressions:
- 0 {key} {value}
- 1 {key} {value}
- 2 {key} {value}
- filter predicates:
- 0
- 1
- 2 {(key < 20)}
- handleSkewJoin: false
- keys:
- 0 [Column[key]]
- 1 [Column[key]]
- 2 [Column[key]]
- Position of Big Table: 2
- a:src2
- TableScan
- alias: src2
- Filter Operator
- predicate:
- expr: (key < 15)
- type: boolean
- Filter Operator
- predicate:
- expr: (key < 15)
- type: boolean
- HashTable Sink Operator
- condition expressions:
- 0 {key} {value}
- 1 {key} {value}
- 2 {key} {value}
- filter predicates:
- 0
- 1
- 2 {(key < 20)}
- handleSkewJoin: false
- keys:
- 0 [Column[key]]
- 1 [Column[key]]
- 2 [Column[key]]
- Position of Big Table: 2
-
- Stage: Stage-7
- Map Reduce
- Alias -> Map Operator Tree:
- a:src3
- TableScan
- alias: src3
- Map Join Operator
- condition map:
- Inner Join 0 to 1
- Right Outer Join0 to 2
- condition expressions:
- 0 {key} {value}
- 1 {key} {value}
- 2 {key} {value}
- filter predicates:
- 0
- 1
- 2 {(key < 20)}
- handleSkewJoin: false
- keys:
- 0 [Column[key]]
- 1 [Column[key]]
- 2 [Column[key]]
- outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9
- Position of Big Table: 2
- Select Operator
- expressions:
- expr: _col0
- type: string
- expr: _col1
+ Reduce Output Operator
+ key expressions:
+ expr: key
type: string
- expr: _col4
- type: string
- expr: _col5
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
type: string
- expr: _col8
+ tag: 0
+ value expressions:
+ expr: key
type: string
- expr: _col9
+ expr: value
type: string
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- Local Work:
- Map Reduce Local Work
-
- Stage: Stage-1
- Map Reduce
- Alias -> Map Operator Tree:
- a:src1
- TableScan
- alias: src1
- Filter Operator
- predicate:
- expr: (key < 10)
- type: boolean
- Filter Operator
- predicate:
- expr: (key < 10)
- type: boolean
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 0
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
a:src2
TableScan
alias: src2
Filter Operator
predicate:
- expr: (key < 15)
+ expr: ((key < 15.0) and (key < 10.0))
type: boolean
- Filter Operator
- predicate:
- expr: (key < 15)
- type: boolean
- Reduce Output Operator
- key expressions:
- expr: key
- type: string
- sort order: +
- Map-reduce partition columns:
- expr: key
- type: string
- tag: 1
- value expressions:
- expr: key
- type: string
- expr: value
- type: string
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: 1
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
a:src3
TableScan
alias: src3
@@ -878,7 +628,7 @@ STAGE PLANS:
filter predicates:
0
1
- 2 {(VALUE._col0 < 20)}
+ 2 {(VALUE._col0 < 20.0)}
handleSkewJoin: false
outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9
Select Operator
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/basic/filter
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/basic/filter b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/basic/filter
index cb844d1..0bba73c 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/basic/filter
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/basic/filter
@@ -1,3 +1,4 @@
sed -e 's#hdfs://[^/]*/#hdfs://HADOOP/#' \
-e 's#Copying file:.*u.data#Copying file: u.data#' \
- -e '/^Deleted.*u_data$/d'
+ -e '/^Deleted.*u_data$/d' \
+ -e '/.*jobconf.xml:an attempt to override final parameter: mapreduce.job.end-notification.*; Ignoring\./ d'
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/basic/out
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/basic/out b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/basic/out
index 5e94525..87b086b 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/basic/out
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/basic/out
@@ -1,17 +1,3 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
DROP TABLE u_data
@@ -27,8 +13,8 @@ STORED AS TEXTFILE
LOAD DATA LOCAL INPATH 'seed_data_files/ml-data/u.data'
OVERWRITE INTO TABLE u_data
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster-clone/examples/hive/target/seed_data_files/ml-data/u.data
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/u_data
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/ml-data/u.data
+Deleted /user/hive/warehouse/u_data
INSERT OVERWRITE DIRECTORY '/tmp/count'
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/bucketizedhiveinputformat/filter
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/bucketizedhiveinputformat/filter b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/bucketizedhiveinputformat/filter
index 26c4a38..b65fd71 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/bucketizedhiveinputformat/filter
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/bucketizedhiveinputformat/filter
@@ -1,3 +1,5 @@
sed -re 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' \
-e 's#Copying file:.*/kv..txt#Copying file:kvX.txt#' \
- -e '/^Deleted hdfs:/d'
+ -e '/^Deleted hdfs:/d' \
+ -e 's#file:/.*/-(ext|mr)-1000#file:/HADOOP/-\1-1000#' \
+ -e '/.*jobconf.xml:an attempt to override final parameter: mapreduce.job.end-notification.*; Ignoring\./ d'
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/bucketizedhiveinputformat/out
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/bucketizedhiveinputformat/out b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/bucketizedhiveinputformat/out
index 945016d..00ab302 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/bucketizedhiveinputformat/out
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/bucketizedhiveinputformat/out
@@ -1,17 +1,3 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
--- contributor license agreements. See the NOTICE file distributed with
--- this work for additional information regarding copyright ownership.
--- The ASF licenses this file to You under the Apache License, Version 2.0
--- (the "License") you may not use this file except in compliance with
--- the License. You may obtain a copy of the License at
---
--- http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
set hive.input.format=org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat
set mapred.min.split.size = 64
@@ -20,7 +6,7 @@ CREATE TABLE T1(name STRING) STORED AS TEXTFILE
LOAD DATA LOCAL INPATH 'seed_data_files/kv1.txt' INTO TABLE T1
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster/examples/hive/target/seed_data_files/kv1.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/kv1.txt
CREATE TABLE T2(name STRING) STORED AS SEQUENCEFILE
@@ -161,7 +147,7 @@ STAGE PLANS:
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- hdfs://monster01.sf.cloudera.com:17020/tmp/hive-hudson/hive_2011-04-01_12-29-52_242_1273761163922951124/-mr-10003
+ hdfs://mgrover-bigtop-centos-1.ent.cloudera.com:17020/tmp/hive-root/hive_2013-04-23_11-41-40_405_3858259332039900449/-mr-10003
Reduce Output Operator
sort order:
tag: -1
@@ -203,7 +189,7 @@ SELECT tmp1.name as name FROM (
JOIN (SELECT 'MMM' AS n FROM T1) tmp2
JOIN (SELECT 'MMM' AS n FROM T1) tmp3
ON tmp1.n = tmp2.n AND tmp1.n = tmp3.n) ttt LIMIT 5000000
-Deleted hdfs://monster01.sf.cloudera.com:17020/user/hive/warehouse/t2
+Deleted /user/hive/warehouse/t2
EXPLAIN SELECT COUNT(1) FROM T2
@@ -266,10 +252,10 @@ SELECT COUNT(1) FROM T2
CREATE TABLE T3(name STRING) STORED AS TEXTFILE
LOAD DATA LOCAL INPATH 'seed_data_files/kv1.txt' INTO TABLE T3
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster/examples/hive/target/seed_data_files/kv1.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/kv1.txt
LOAD DATA LOCAL INPATH 'seed_data_files/kv2.txt' INTO TABLE T3
-Copying file: file:/var/lib/hudson/workspace/Nightly-smoke-testing-monster/examples/hive/target/seed_data_files/kv2.txt
+Copying file: file:/root/bigtop/bigtop-tests/test-execution/smokes/hive/target/seed_data_files/kv2.txt
EXPLAIN SELECT COUNT(1) FROM T3
http://git-wip-us.apache.org/repos/asf/bigtop/blob/8d32a92d/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/bucketmapjoin5/filter
----------------------------------------------------------------------
diff --git a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/bucketmapjoin5/filter b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/bucketmapjoin5/filter
index 69b7bb7..2384b8a 100644
--- a/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/bucketmapjoin5/filter
+++ b/bigtop-tests/test-artifacts/hive/src/main/resources/scripts/ql/bucketmapjoin5/filter
@@ -1,4 +1,6 @@
sed -re 's#hdfs://[^/]*/#hdfs://HADOOP/#g' \
-e 's#Copying file:.*/srcbucket2#Copying file:srcbucket2#' \
-e 's#hdfs://.*/-(ext|mr)-1000#hdfs://HADOOP/-\1-1000#' \
- -e 's#transient_lastDdlTime [0-9]*#transient_lastDdlTime JUSTNOW#'
+ -e 's#transient_lastDdlTime [0-9]*#transient_lastDdlTime JUSTNOW#' \
+ -e 's#file:/.*/-(ext|mr)-1000#file:/HADOOP/-\1-1000#' \
+ -e '/.*jobconf.xml:an attempt to override final parameter: mapreduce.job.end-notification.*; Ignoring\./ d'