You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by ha...@apache.org on 2013/05/08 07:09:26 UTC
svn commit: r1480161 - in /hive/trunk:
metastore/src/java/org/apache/hadoop/hive/metastore/
ql/src/java/org/apache/hadoop/hive/ql/parse/
ql/src/test/queries/clientpositive/ ql/src/test/results/clientpositive/
Author: hashutosh
Date: Wed May 8 05:09:25 2013
New Revision: 1480161
URL: http://svn.apache.org/r1480161
Log:
HIVE-4392 : Illogical InvalidObjectException thrown when using multiple aggregate functions with star columns (Navis via Ashutosh Chauhan) [typos in original JIRA title: "throwed when use mulit"]
Added:
hive/trunk/ql/src/test/queries/clientpositive/ctas_colname.q
hive/trunk/ql/src/test/results/clientpositive/ctas_colname.q.out
Modified:
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java?rev=1480161&r1=1480160&r2=1480161&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java Wed May 8 05:09:25 2013
@@ -64,11 +64,14 @@ public class HiveAlterHandler implements
throw new InvalidOperationException("New table is invalid: " + newt);
}
- if (!MetaStoreUtils.validateName(newt.getTableName())
- || !MetaStoreUtils.validateTblColumns(newt.getSd().getCols())) {
+ if (!MetaStoreUtils.validateName(newt.getTableName())) {
throw new InvalidOperationException(newt.getTableName()
+ " is not a valid object name");
}
+ String validate = MetaStoreUtils.validateTblColumns(newt.getSd().getCols());
+ if (validate != null) {
+ throw new InvalidOperationException("Invalid column " + validate);
+ }
Path srcPath = null;
FileSystem srcFs = null;
Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1480161&r1=1480160&r2=1480161&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Wed May 8 05:09:25 2013
@@ -80,6 +80,7 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
import org.apache.hadoop.hive.metastore.api.Role;
+import org.apache.hadoop.hive.metastore.api.SkewedInfo;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
import org.apache.hadoop.hive.metastore.api.Type;
@@ -1003,20 +1004,32 @@ public class HiveMetaStore extends Thrif
throws AlreadyExistsException, MetaException,
InvalidObjectException, NoSuchObjectException {
- if (!MetaStoreUtils.validateName(tbl.getTableName())
- || !MetaStoreUtils.validateTblColumns(tbl.getSd().getCols())
- || (tbl.getPartitionKeys() != null && !MetaStoreUtils
- .validateTblColumns(tbl.getPartitionKeys()))
- || !MetaStoreUtils.validateSkewedColNames(
- (null == tbl.getSd().getSkewedInfo()) ?
- null : tbl.getSd().getSkewedInfo().getSkewedColNames())
- || !MetaStoreUtils.validateSkewedColNamesSubsetCol(
- (null == tbl.getSd().getSkewedInfo()) ?
- null : tbl.getSd().getSkewedInfo().getSkewedColNames(),
- tbl.getSd().getCols())) {
+ if (!MetaStoreUtils.validateName(tbl.getTableName())) {
throw new InvalidObjectException(tbl.getTableName()
+ " is not a valid object name");
}
+ String validate = MetaStoreUtils.validateTblColumns(tbl.getSd().getCols());
+ if (validate != null) {
+ throw new InvalidObjectException("Invalid column " + validate);
+ }
+ if (tbl.getPartitionKeys() != null) {
+ validate = MetaStoreUtils.validateTblColumns(tbl.getPartitionKeys());
+ if (validate != null) {
+ throw new InvalidObjectException("Invalid partition column " + validate);
+ }
+ }
+ SkewedInfo skew = tbl.getSd().getSkewedInfo();
+ if (skew != null) {
+ validate = MetaStoreUtils.validateSkewedColNames(skew.getSkewedColNames());
+ if (validate != null) {
+ throw new InvalidObjectException("Invalid skew column " + validate);
+ }
+ validate = MetaStoreUtils.validateSkewedColNamesSubsetCol(
+ skew.getSkewedColNames(), tbl.getSd().getCols());
+ if (validate != null) {
+ throw new InvalidObjectException("Invalid skew column " + validate);
+ }
+ }
Path tblPath = null;
boolean success = false, madeDir = false;
Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java?rev=1480161&r1=1480160&r2=1480161&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java Wed May 8 05:09:25 2013
@@ -352,16 +352,16 @@ public class MetaStoreUtils {
return false;
}
- static public boolean validateTblColumns(List<FieldSchema> cols) {
+ static public String validateTblColumns(List<FieldSchema> cols) {
for (FieldSchema fieldSchema : cols) {
if (!validateName(fieldSchema.getName())) {
- return false;
+ return "name: " + fieldSchema.getName();
}
if (!validateColumnType(fieldSchema.getType())) {
- return false;
+ return "type: " + fieldSchema.getType();
}
}
- return true;
+ return null;
}
static void throwExceptionIfIncompatibleColTypeChange(
@@ -434,22 +434,22 @@ public class MetaStoreUtils {
return true;
}
- public static boolean validateSkewedColNames(List<String> cols) {
+ public static String validateSkewedColNames(List<String> cols) {
if (null == cols) {
- return true;
+ return null;
}
for (String col : cols) {
if (!validateName(col)) {
- return false;
+ return col;
}
}
- return true;
+ return null;
}
- public static boolean validateSkewedColNamesSubsetCol(List<String> skewedColNames,
+ public static String validateSkewedColNamesSubsetCol(List<String> skewedColNames,
List<FieldSchema> cols) {
if (null == skewedColNames) {
- return true;
+ return null;
}
List<String> colNames = new ArrayList<String>();
for (FieldSchema fieldSchema : cols) {
@@ -459,7 +459,10 @@ public class MetaStoreUtils {
List<String> copySkewedColNames = new ArrayList<String>(skewedColNames);
// remove valid columns
copySkewedColNames.removeAll(colNames);
- return (copySkewedColNames.size() > 0) ? false : true;
+ if (copySkewedColNames.isEmpty()) {
+ return null;
+ }
+ return copySkewedColNames.toString();
}
public static String getListType(String t) {
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java?rev=1480161&r1=1480160&r2=1480161&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java Wed May 8 05:09:25 2013
@@ -1050,8 +1050,7 @@ public class PTFTranslator {
}
ColumnInfo cInfo = new ColumnInfo(wFnDef.getAlias(),
TypeInfoUtils.getTypeInfoFromObjectInspector(wFnOI),
- null,
- false);
+ null, true, true);
rr.putExpression(ast, cInfo);
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1480161&r1=1480160&r2=1480161&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Wed May 8 05:09:25 2013
@@ -5167,10 +5167,11 @@ public class SemanticAnalyzer extends Ba
if (field_schemas != null) {
FieldSchema col = new FieldSchema();
- if (nm[1] != null) {
- col.setName(unescapeIdentifier(colInfo.getAlias()).toLowerCase()); // remove ``
- } else {
+ if ("".equals(nm[0]) || nm[1] == null) {
+ // ast expression is not a valid column name for table
col.setName(colInfo.getInternalName());
+ } else {
+ col.setName(unescapeIdentifier(colInfo.getAlias()).toLowerCase()); // remove ``
}
col.setType(colInfo.getType().getTypeName());
field_schemas.add(col);
@@ -7385,6 +7386,10 @@ public class SemanticAnalyzer extends Ba
for (ColumnInfo colInfo : rr.getColumnInfos()) {
String name = colInfo.getInternalName();
String[] tmp = rr.reverseLookup(name);
+ if ("".equals(tmp[0]) || tmp[1] == null) {
+ // ast expression is not a valid column name for table
+ tmp[1] = colInfo.getInternalName();
+ }
newRR.put(alias, tmp[1], colInfo);
}
opParseCtx.get(curr).setRowResolver(newRR);
Added: hive/trunk/ql/src/test/queries/clientpositive/ctas_colname.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/ctas_colname.q?rev=1480161&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/ctas_colname.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/ctas_colname.q Wed May 8 05:09:25 2013
@@ -0,0 +1,48 @@
+-- HIVE-4392, column aliases from expressionRR (GBY, etc.) are not valid name for table
+-- use internal name as column name instead
+
+-- group by
+explain
+create table summary as select *, sum(key), count(value) from src;
+create table summary as select *, sum(key), count(value) from src;
+describe formatted summary;
+select * from summary;
+
+-- window functions
+explain
+create table x4 as select *, rank() over(partition by key order by value) as rr from src1;
+create table x4 as select *, rank() over(partition by key order by value) as rr from src1;
+describe formatted x4;
+select * from x4;
+
+explain
+create table x5 as select *, lead(key,1) over(partition by key order by value) from src limit 20;
+create table x5 as select *, lead(key,1) over(partition by key order by value) from src limit 20;
+describe formatted x5;
+select * from x5;
+
+-- sub queries
+explain
+create table x6 as select * from (select *, max(key) from src1) a;
+create table x6 as select * from (select *, max(key) from src1) a;
+describe formatted x6;
+select * from x6;
+
+explain
+create table x7 as select * from (select * from src group by key) a;
+create table x7 as select * from (select * from src group by key) a;
+describe formatted x7;
+select * from x7;
+
+explain
+create table x8 as select * from (select * from src group by key having key < 9) a;
+create table x8 as select * from (select * from src group by key having key < 9) a;
+describe formatted x8;
+select * from x8;
+
+explain
+create table x9 as select * from (select max(value),key from src group by key having key < 9 AND max(value) IS NOT NULL) a;
+create table x9 as select * from (select max(value),key from src group by key having key < 9 AND max(value) IS NOT NULL) a;
+describe formatted x9;
+select * from x9;
+
Added: hive/trunk/ql/src/test/results/clientpositive/ctas_colname.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/ctas_colname.q.out?rev=1480161&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/ctas_colname.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/ctas_colname.q.out Wed May 8 05:09:25 2013
@@ -0,0 +1,1359 @@
+PREHOOK: query: -- HIVE-4392, column aliases from expressionRR (GBY, etc.) are not valid name for table
+-- use internal name as column name instead
+
+-- group by
+explain
+create table summary as select *, sum(key), count(value) from src
+PREHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: query: -- HIVE-4392, column aliases from expressionRR (GBY, etc.) are not valid name for table
+-- use internal name as column name instead
+
+-- group by
+explain
+create table summary as select *, sum(key), count(value) from src
+POSTHOOK: type: CREATETABLE_AS_SELECT
+ABSTRACT SYNTAX TREE:
+ (TOK_CREATETABLE (TOK_TABNAME summary) TOK_LIKETABLE (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_TABLE_OR_COL key))) (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL value)))))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+ Stage-3 depends on stages: Stage-0
+ Stage-2 depends on stages: Stage-3
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ src
+ TableScan
+ alias: src
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ outputColumnNames: key, value
+ Group By Operator
+ aggregations:
+ expr: sum(key)
+ expr: count(value)
+ bucketGroup: false
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Reduce Output Operator
+ sort order:
+ tag: -1
+ value expressions:
+ expr: _col0
+ type: double
+ expr: _col1
+ type: bigint
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations:
+ expr: sum(VALUE._col0)
+ expr: count(VALUE._col1)
+ bucketGroup: false
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Select Operator
+ expressions:
+ expr: _col0
+ type: double
+ expr: _col1
+ type: bigint
+ expr: _col0
+ type: double
+ expr: _col1
+ type: bigint
+ outputColumnNames: _col0, _col1, _col2, _col3
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ name: default.summary
+
+ Stage: Stage-0
+ Move Operator
+ files:
+ hdfs directory: true
+#### A masked pattern was here ####
+
+ Stage: Stage-3
+ Create Table Operator:
+ Create Table
+ columns: _col0 double, _col1 bigint, _c1 double, _c2 bigint
+ if not exists: false
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ # buckets: -1
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ name: summary
+ isExternal: false
+
+ Stage: Stage-2
+ Stats-Aggr Operator
+
+
+PREHOOK: query: create table summary as select *, sum(key), count(value) from src
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@src
+POSTHOOK: query: create table summary as select *, sum(key), count(value) from src
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@summary
+PREHOOK: query: describe formatted summary
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe formatted summary
+POSTHOOK: type: DESCTABLE
+# col_name data_type comment
+
+_col0 double None
+_col1 bigint None
+_c1 double None
+_c2 bigint None
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 1
+ numPartitions 0
+ numRows 1
+ rawDataSize 25
+ totalSize 26
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: select * from summary
+PREHOOK: type: QUERY
+PREHOOK: Input: default@summary
+#### A masked pattern was here ####
+POSTHOOK: query: select * from summary
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@summary
+#### A masked pattern was here ####
+130091.0 500 130091.0 500
+PREHOOK: query: -- window functions
+explain
+create table x4 as select *, rank() over(partition by key order by value) as rr from src1
+PREHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: query: -- window functions
+explain
+create table x4 as select *, rank() over(partition by key order by value) as rr from src1
+POSTHOOK: type: CREATETABLE_AS_SELECT
+ABSTRACT SYNTAX TREE:
+ (TOK_CREATETABLE (TOK_TABNAME x4) TOK_LIKETABLE (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF) (TOK_SELEXPR (TOK_FUNCTION rank (TOK_WINDOWSPEC (TOK_PARTITIONINGSPEC (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value)))))) rr (TOK_WINDOWSPEC (TOK_PARTITIONINGSPEC (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))))))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+ Stage-3 depends on stages: Stage-0
+ Stage-2 depends on stages: Stage-3
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ src1
+ TableScan
+ alias: src1
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ sort order: ++
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: -1
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ Reduce Operator Tree:
+ Extract
+ PTF Operator
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _wcol0
+ type: int
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ name: default.x4
+
+ Stage: Stage-0
+ Move Operator
+ files:
+ hdfs directory: true
+#### A masked pattern was here ####
+
+ Stage: Stage-3
+ Create Table Operator:
+ Create Table
+ columns: key string, value string, rr int
+ if not exists: false
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ # buckets: -1
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ name: x4
+ isExternal: false
+
+ Stage: Stage-2
+ Stats-Aggr Operator
+
+
+PREHOOK: query: create table x4 as select *, rank() over(partition by key order by value) as rr from src1
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@src1
+POSTHOOK: query: create table x4 as select *, rank() over(partition by key order by value) as rr from src1
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@src1
+POSTHOOK: Output: default@x4
+PREHOOK: query: describe formatted x4
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe formatted x4
+POSTHOOK: type: DESCTABLE
+# col_name data_type comment
+
+key string None
+value string None
+rr int None
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 1
+ numPartitions 0
+ numRows 25
+ rawDataSize 242
+ totalSize 267
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: select * from x4
+PREHOOK: type: QUERY
+PREHOOK: Input: default@x4
+#### A masked pattern was here ####
+POSTHOOK: query: select * from x4
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@x4
+#### A masked pattern was here ####
+ 1
+ 1
+ 1
+ 1
+ val_165 5
+ val_193 6
+ val_265 7
+ val_27 8
+ val_409 9
+ val_484 10
+128 1
+146 val_146 1
+150 val_150 1
+213 val_213 1
+224 1
+238 val_238 1
+255 val_255 1
+273 val_273 1
+278 val_278 1
+311 val_311 1
+369 1
+401 val_401 1
+406 val_406 1
+66 val_66 1
+98 val_98 1
+PREHOOK: query: explain
+create table x5 as select *, lead(key,1) over(partition by key order by value) from src limit 20
+PREHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: query: explain
+create table x5 as select *, lead(key,1) over(partition by key order by value) from src limit 20
+POSTHOOK: type: CREATETABLE_AS_SELECT
+ABSTRACT SYNTAX TREE:
+ (TOK_CREATETABLE (TOK_TABNAME x5) TOK_LIKETABLE (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF) (TOK_SELEXPR (TOK_FUNCTION lead (TOK_TABLE_OR_COL key) 1 (TOK_WINDOWSPEC (TOK_PARTITIONINGSPEC (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value)))))) (TOK_WINDOWSPEC (TOK_PARTITIONINGSPEC (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))))) (TOK_LIMIT 20))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-2 depends on stages: Stage-1
+ Stage-0 depends on stages: Stage-2
+ Stage-4 depends on stages: Stage-0
+ Stage-3 depends on stages: Stage-4
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ src
+ TableScan
+ alias: src
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ sort order: ++
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: -1
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ Reduce Operator Tree:
+ Extract
+ PTF Operator
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _wcol0
+ type: string
+ outputColumnNames: _col0, _col1, _col2
+ Limit
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+ Stage: Stage-2
+ Map Reduce
+ Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+ Reduce Output Operator
+ sort order:
+ tag: -1
+ value expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ Reduce Operator Tree:
+ Extract
+ Limit
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ name: default.x5
+
+ Stage: Stage-0
+ Move Operator
+ files:
+ hdfs directory: true
+#### A masked pattern was here ####
+
+ Stage: Stage-4
+ Create Table Operator:
+ Create Table
+ columns: key string, value string, tok_windowspec string
+ if not exists: false
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ # buckets: -1
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ name: x5
+ isExternal: false
+
+ Stage: Stage-3
+ Stats-Aggr Operator
+
+
+PREHOOK: query: create table x5 as select *, lead(key,1) over(partition by key order by value) from src limit 20
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@src
+POSTHOOK: query: create table x5 as select *, lead(key,1) over(partition by key order by value) from src limit 20
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@x5
+PREHOOK: query: describe formatted x5
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe formatted x5
+POSTHOOK: type: DESCTABLE
+# col_name data_type comment
+
+key string None
+value string None
+tok_windowspec string None
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 1
+ numPartitions 0
+ numRows 20
+ rawDataSize 268
+ totalSize 288
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: select * from x5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@x5
+#### A masked pattern was here ####
+POSTHOOK: query: select * from x5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@x5
+#### A masked pattern was here ####
+0 val_0 0
+0 val_0 0
+0 val_0 NULL
+10 val_10 NULL
+100 val_100 100
+100 val_100 NULL
+103 val_103 103
+103 val_103 NULL
+104 val_104 104
+104 val_104 NULL
+105 val_105 NULL
+11 val_11 NULL
+111 val_111 NULL
+113 val_113 113
+113 val_113 NULL
+114 val_114 NULL
+116 val_116 NULL
+118 val_118 118
+118 val_118 NULL
+119 val_119 119
+PREHOOK: query: -- sub queries
+explain
+create table x6 as select * from (select *, max(key) from src1) a
+PREHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: query: -- sub queries
+explain
+create table x6 as select * from (select *, max(key) from src1) a
+POSTHOOK: type: CREATETABLE_AS_SELECT
+ABSTRACT SYNTAX TREE:
+ (TOK_CREATETABLE (TOK_TABNAME x6) TOK_LIKETABLE (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF) (TOK_SELEXPR (TOK_FUNCTION max (TOK_TABLE_OR_COL key)))))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+ Stage-3 depends on stages: Stage-0
+ Stage-2 depends on stages: Stage-3
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ a:src1
+ TableScan
+ alias: src1
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ outputColumnNames: key
+ Group By Operator
+ aggregations:
+ expr: max(key)
+ bucketGroup: false
+ mode: hash
+ outputColumnNames: _col0
+ Reduce Output Operator
+ sort order:
+ tag: -1
+ value expressions:
+ expr: _col0
+ type: string
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations:
+ expr: max(VALUE._col0)
+ bucketGroup: false
+ mode: mergepartial
+ outputColumnNames: _col0
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col0
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ name: default.x6
+
+ Stage: Stage-0
+ Move Operator
+ files:
+ hdfs directory: true
+#### A masked pattern was here ####
+
+ Stage: Stage-3
+ Create Table Operator:
+ Create Table
+ columns: _col0 string, _c1 string
+ if not exists: false
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ # buckets: -1
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ name: x6
+ isExternal: false
+
+ Stage: Stage-2
+ Stats-Aggr Operator
+
+
+PREHOOK: query: create table x6 as select * from (select *, max(key) from src1) a
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@src1
+POSTHOOK: query: create table x6 as select * from (select *, max(key) from src1) a
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@src1
+POSTHOOK: Output: default@x6
+PREHOOK: query: describe formatted x6
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe formatted x6
+POSTHOOK: type: DESCTABLE
+# col_name data_type comment
+
+_col0 string None
+_c1 string None
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 1
+ numPartitions 0
+ numRows 1
+ rawDataSize 5
+ totalSize 6
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: select * from x6
+PREHOOK: type: QUERY
+PREHOOK: Input: default@x6
+#### A masked pattern was here ####
+POSTHOOK: query: select * from x6
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@x6
+#### A masked pattern was here ####
+98 98
+PREHOOK: query: explain
+create table x7 as select * from (select * from src group by key) a
+PREHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: query: explain
+create table x7 as select * from (select * from src group by key) a
+POSTHOOK: type: CREATETABLE_AS_SELECT
+ABSTRACT SYNTAX TREE:
+ (TOK_CREATETABLE (TOK_TABNAME x7) TOK_LIKETABLE (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_GROUPBY (TOK_TABLE_OR_COL key)))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+ Stage-3 depends on stages: Stage-0
+ Stage-2 depends on stages: Stage-3
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ a:src
+ TableScan
+ alias: src
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ outputColumnNames: key
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: key
+ type: string
+ mode: hash
+ outputColumnNames: _col0
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: _col0
+ type: string
+ tag: -1
+ Reduce Operator Tree:
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
+ type: string
+ mode: mergepartial
+ outputColumnNames: _col0
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ outputColumnNames: _col0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ name: default.x7
+
+ Stage: Stage-0
+ Move Operator
+ files:
+ hdfs directory: true
+#### A masked pattern was here ####
+
+ Stage: Stage-3
+ Create Table Operator:
+ Create Table
+ columns: _col0 string
+ if not exists: false
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ # buckets: -1
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ name: x7
+ isExternal: false
+
+ Stage: Stage-2
+ Stats-Aggr Operator
+
+
+PREHOOK: query: create table x7 as select * from (select * from src group by key) a
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@src
+POSTHOOK: query: create table x7 as select * from (select * from src group by key) a
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@x7
+PREHOOK: query: describe formatted x7
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe formatted x7
+POSTHOOK: type: DESCTABLE
+# col_name data_type comment
+
+_col0 string None
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 1
+ numPartitions 0
+ numRows 309
+ rawDataSize 864
+ totalSize 1173
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: select * from x7
+PREHOOK: type: QUERY
+PREHOOK: Input: default@x7
+#### A masked pattern was here ####
+POSTHOOK: query: select * from x7
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@x7
+#### A masked pattern was here ####
+0
+10
+100
+103
+104
+105
+11
+111
+113
+114
+116
+118
+119
+12
+120
+125
+126
+128
+129
+131
+133
+134
+136
+137
+138
+143
+145
+146
+149
+15
+150
+152
+153
+155
+156
+157
+158
+160
+162
+163
+164
+165
+166
+167
+168
+169
+17
+170
+172
+174
+175
+176
+177
+178
+179
+18
+180
+181
+183
+186
+187
+189
+19
+190
+191
+192
+193
+194
+195
+196
+197
+199
+2
+20
+200
+201
+202
+203
+205
+207
+208
+209
+213
+214
+216
+217
+218
+219
+221
+222
+223
+224
+226
+228
+229
+230
+233
+235
+237
+238
+239
+24
+241
+242
+244
+247
+248
+249
+252
+255
+256
+257
+258
+26
+260
+262
+263
+265
+266
+27
+272
+273
+274
+275
+277
+278
+28
+280
+281
+282
+283
+284
+285
+286
+287
+288
+289
+291
+292
+296
+298
+30
+302
+305
+306
+307
+308
+309
+310
+311
+315
+316
+317
+318
+321
+322
+323
+325
+327
+33
+331
+332
+333
+335
+336
+338
+339
+34
+341
+342
+344
+345
+348
+35
+351
+353
+356
+360
+362
+364
+365
+366
+367
+368
+369
+37
+373
+374
+375
+377
+378
+379
+382
+384
+386
+389
+392
+393
+394
+395
+396
+397
+399
+4
+400
+401
+402
+403
+404
+406
+407
+409
+41
+411
+413
+414
+417
+418
+419
+42
+421
+424
+427
+429
+43
+430
+431
+432
+435
+436
+437
+438
+439
+44
+443
+444
+446
+448
+449
+452
+453
+454
+455
+457
+458
+459
+460
+462
+463
+466
+467
+468
+469
+47
+470
+472
+475
+477
+478
+479
+480
+481
+482
+483
+484
+485
+487
+489
+490
+491
+492
+493
+494
+495
+496
+497
+498
+5
+51
+53
+54
+57
+58
+64
+65
+66
+67
+69
+70
+72
+74
+76
+77
+78
+8
+80
+82
+83
+84
+85
+86
+87
+9
+90
+92
+95
+96
+97
+98
+PREHOOK: query: explain
+create table x8 as select * from (select * from src group by key having key < 9) a
+PREHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: query: explain
+create table x8 as select * from (select * from src group by key having key < 9) a
+POSTHOOK: type: CREATETABLE_AS_SELECT
+ABSTRACT SYNTAX TREE:
+ (TOK_CREATETABLE (TOK_TABNAME x8) TOK_LIKETABLE (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_GROUPBY (TOK_TABLE_OR_COL key)) (TOK_HAVING (< (TOK_TABLE_OR_COL key) 9)))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+ Stage-3 depends on stages: Stage-0
+ Stage-2 depends on stages: Stage-3
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ a:src
+ TableScan
+ alias: src
+ Filter Operator
+ predicate:
+ expr: (key < 9.0)
+ type: boolean
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ outputColumnNames: key
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: key
+ type: string
+ mode: hash
+ outputColumnNames: _col0
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: _col0
+ type: string
+ tag: -1
+ Reduce Operator Tree:
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
+ type: string
+ mode: mergepartial
+ outputColumnNames: _col0
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ outputColumnNames: _col0
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ name: default.x8
+
+ Stage: Stage-0
+ Move Operator
+ files:
+ hdfs directory: true
+#### A masked pattern was here ####
+
+ Stage: Stage-3
+ Create Table Operator:
+ Create Table
+ columns: _col0 string
+ if not exists: false
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ # buckets: -1
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ name: x8
+ isExternal: false
+
+ Stage: Stage-2
+ Stats-Aggr Operator
+
+
+PREHOOK: query: create table x8 as select * from (select * from src group by key having key < 9) a
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@src
+POSTHOOK: query: create table x8 as select * from (select * from src group by key having key < 9) a
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@x8
+PREHOOK: query: describe formatted x8
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe formatted x8
+POSTHOOK: type: DESCTABLE
+# col_name data_type comment
+
+_col0 string None
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 1
+ numPartitions 0
+ numRows 5
+ rawDataSize 5
+ totalSize 10
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: select * from x8
+PREHOOK: type: QUERY
+PREHOOK: Input: default@x8
+#### A masked pattern was here ####
+POSTHOOK: query: select * from x8
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@x8
+#### A masked pattern was here ####
+0
+2
+4
+5
+8
+PREHOOK: query: explain
+create table x9 as select * from (select max(value),key from src group by key having key < 9 AND max(value) IS NOT NULL) a
+PREHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: query: explain
+create table x9 as select * from (select max(value),key from src group by key having key < 9 AND max(value) IS NOT NULL) a
+POSTHOOK: type: CREATETABLE_AS_SELECT
+ABSTRACT SYNTAX TREE:
+ (TOK_CREATETABLE (TOK_TABNAME x9) TOK_LIKETABLE (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION max (TOK_TABLE_OR_COL value))) (TOK_SELEXPR (TOK_TABLE_OR_COL key))) (TOK_GROUPBY (TOK_TABLE_OR_COL key)) (TOK_HAVING (AND (< (TOK_TABLE_OR_COL key) 9) (TOK_FUNCTION TOK_ISNOTNULL (TOK_FUNCTION max (TOK_TABLE_OR_COL value))))))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+ Stage-3 depends on stages: Stage-0
+ Stage-2 depends on stages: Stage-3
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ a:src
+ TableScan
+ alias: src
+ Filter Operator
+ predicate:
+ expr: (key < 9.0)
+ type: boolean
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ outputColumnNames: key, value
+ Group By Operator
+ aggregations:
+ expr: max(value)
+ bucketGroup: false
+ keys:
+ expr: key
+ type: string
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: _col0
+ type: string
+ tag: -1
+ value expressions:
+ expr: _col1
+ type: string
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations:
+ expr: max(VALUE._col0)
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
+ type: string
+ mode: mergepartial
+ outputColumnNames: _col0, _col1
+ Filter Operator
+ predicate:
+ expr: ((_col0 < 9.0) and _col1 is not null)
+ type: boolean
+ Select Operator
+ expressions:
+ expr: _col1
+ type: string
+ expr: _col0
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ name: default.x9
+
+ Stage: Stage-0
+ Move Operator
+ files:
+ hdfs directory: true
+#### A masked pattern was here ####
+
+ Stage: Stage-3
+ Create Table Operator:
+ Create Table
+ columns: _c0 string, key string
+ if not exists: false
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ # buckets: -1
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ name: x9
+ isExternal: false
+
+ Stage: Stage-2
+ Stats-Aggr Operator
+
+
+PREHOOK: query: create table x9 as select * from (select max(value),key from src group by key having key < 9 AND max(value) IS NOT NULL) a
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@src
+POSTHOOK: query: create table x9 as select * from (select max(value),key from src group by key having key < 9 AND max(value) IS NOT NULL) a
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@x9
+PREHOOK: query: describe formatted x9
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe formatted x9
+POSTHOOK: type: DESCTABLE
+# col_name data_type comment
+
+_c0 string None
+key string None
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Protect Mode: None
+Retention: 0
+#### A masked pattern was here ####
+Table Type: MANAGED_TABLE
+Table Parameters:
+ numFiles 1
+ numPartitions 0
+ numRows 5
+ rawDataSize 35
+ totalSize 40
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+PREHOOK: query: select * from x9
+PREHOOK: type: QUERY
+PREHOOK: Input: default@x9
+#### A masked pattern was here ####
+POSTHOOK: query: select * from x9
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@x9
+#### A masked pattern was here ####
+val_0 0
+val_2 2
+val_4 4
+val_5 5
+val_8 8