You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by xu...@apache.org on 2015/12/01 06:37:23 UTC
[3/3] hive git commit: HIVE-12184: DESCRIBE of fully qualified table
fails when db and table name match and non-default database is in use (Naveen
via Xuefu)
HIVE-12184: DESCRIBE of fully qualified table fails when db and table name match and non-default database is in use (Naveen via Xuefu)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e9ca6870
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e9ca6870
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e9ca6870
Branch: refs/heads/master
Commit: e9ca6870df889e03e8fa6888d7fbb51c4fbaf20a
Parents: 3a17d42
Author: Xuefu Zhang <xz...@Cloudera.com>
Authored: Mon Nov 30 21:37:11 2015 -0800
Committer: Xuefu Zhang <xz...@Cloudera.com>
Committed: Mon Nov 30 21:37:11 2015 -0800
----------------------------------------------------------------------
.../hive/ql/parse/DDLSemanticAnalyzer.java | 226 ++++-------
.../apache/hadoop/hive/ql/parse/HiveParser.g | 23 +-
.../test/queries/clientnegative/desc_failure4.q | 5 +
.../queries/clientnegative/describe_xpath1.q | 2 +-
.../queries/clientnegative/describe_xpath2.q | 2 +-
.../queries/clientnegative/describe_xpath3.q | 2 +-
.../queries/clientnegative/describe_xpath4.q | 2 +-
.../alter_partition_update_status.q | 12 +-
.../alter_table_invalidate_column_stats.q | 74 ++--
.../clientpositive/alter_table_update_status.q | 10 +-
.../queries/clientpositive/analyze_tbl_part.q | 8 +-
.../queries/clientpositive/colstats_all_nulls.q | 4 +-
.../clientpositive/columnstats_part_coltype.q | 42 +-
.../clientpositive/columnstats_partlvl.q | 12 +-
.../clientpositive/columnstats_partlvl_dp.q | 20 +-
.../queries/clientpositive/compustat_avro.q | 4 +-
.../clientpositive/confirm_initial_tbl_stats.q | 22 +-
.../queries/clientpositive/describe_syntax.q | 10 +-
.../queries/clientpositive/describe_table.q | 64 ++-
.../queries/clientpositive/describe_xpath.q | 12 +-
.../extrapolate_part_stats_full.q | 2 +-
.../extrapolate_part_stats_partial.q | 4 +-
.../extrapolate_part_stats_partial_ndv.q | 44 +--
.../clientpositive/partition_coltype_literals.q | 4 +-
.../queries/clientpositive/stats_only_null.q | 2 +-
.../results/clientnegative/desc_failure3.q.out | 2 +-
.../results/clientnegative/desc_failure4.q.out | 21 +
.../clientnegative/describe_xpath1.q.out | 2 +-
.../clientnegative/describe_xpath2.q.out | 2 +-
.../clientnegative/describe_xpath3.q.out | 2 +-
.../clientnegative/describe_xpath4.q.out | 2 +-
.../clientnegative/drop_database_cascade.q.out | 2 +-
.../alter_partition_update_status.q.out | 20 +-
.../alter_table_invalidate_column_stats.q.out | 144 +++----
.../alter_table_update_status.q.out | 20 +-
.../results/clientpositive/ambiguitycheck.q.out | 4 +-
.../clientpositive/analyze_tbl_part.q.out | 12 +-
.../clientpositive/colstats_all_nulls.q.out | 8 +-
.../columnstats_part_coltype.q.out | 84 ++--
.../clientpositive/columnstats_partlvl.q.out | 24 +-
.../clientpositive/columnstats_partlvl_dp.q.out | 40 +-
.../results/clientpositive/compustat_avro.q.out | 8 +-
.../confirm_initial_tbl_stats.q.out | 44 +--
.../clientpositive/describe_syntax.q.out | 20 +-
.../results/clientpositive/describe_table.q.out | 390 ++++++++++++++++++-
.../results/clientpositive/describe_xpath.q.out | 24 +-
.../extrapolate_part_stats_full.q.out | 4 +-
.../extrapolate_part_stats_partial.q.out | 8 +-
.../extrapolate_part_stats_partial_ndv.q.out | 88 ++---
.../clientpositive/llap/stats_only_null.q.out | 4 +-
.../partition_coltype_literals.q.out | 8 +-
.../clientpositive/spark/stats_only_null.q.out | 4 +-
.../clientpositive/stats_only_null.q.out | 4 +-
.../clientpositive/tez/stats_only_null.q.out | 4 +-
54 files changed, 999 insertions(+), 612 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index eea2fcc..757542d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -1717,158 +1717,65 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
}
}
- // assume the first component of DOT delimited name is tableName
- // get the attemptTableName
- static public String getAttemptTableName(Hive db, String qualifiedName, boolean isColumn)
- throws SemanticException {
- // check whether the name starts with table
- // DESCRIBE table
- // DESCRIBE table.column
- // DESCRIBE table column
- String tableName = qualifiedName.substring(0,
- qualifiedName.indexOf('.') == -1 ?
- qualifiedName.length() : qualifiedName.indexOf('.'));
- try {
- Table tab = db.getTable(tableName);
- if (tab != null) {
- if (isColumn) {
- // if attempt to get columnPath
- // return the whole qualifiedName(table.column or table)
- return qualifiedName;
- } else {
- // if attempt to get tableName
- // return table
- return tableName;
- }
- }
- } catch (InvalidTableException e) {
- // assume the first DOT delimited component is tableName
- // OK if it is not
- // do nothing when having exception
- return null;
- } catch (HiveException e) {
- throw new SemanticException(e.getMessage(), e);
- }
- return null;
- }
+ // get the column path
+ // return the column name if one is specified; the column may be DOT separated.
+ // example: lintString.$elem$.myint
+ // return the table name as the column path if no column has been specified.
+ static public String getColPath(
+ Hive db,
+ ASTNode node,
+ String dbName,
+ String tableName,
+ Map<String, String> partSpec) throws SemanticException {
- // get Database Name
- static public String getDBName(Hive db, ASTNode ast) {
- String dbName = null;
- String fullyQualifiedName = getFullyQualifiedName(ast);
+ // if this ast has only one child, then no column name specified.
+ if (node.getChildCount() == 1) {
+ return tableName;
+ }
- // if database.table or database.table.column or table.column
- // first try the first component of the DOT separated name
- if (ast.getChildCount() >= 2) {
- dbName = fullyQualifiedName.substring(0,
- fullyQualifiedName.indexOf('.') == -1 ?
- fullyQualifiedName.length() :
- fullyQualifiedName.indexOf('.'));
- try {
- // if the database name is not valid
- // it is table.column
- // return null as dbName
- if (!db.databaseExists(dbName)) {
- return null;
- }
- } catch (HiveException e) {
- return null;
+ ASTNode columnNode = null;
+ // Second child node could be partitionspec or column
+ if (node.getChildCount() > 1) {
+ if (partSpec == null) {
+ columnNode = (ASTNode) node.getChild(1);
+ } else {
+ columnNode = (ASTNode) node.getChild(2);
}
- } else {
- // in other cases, return null
- // database is not validated if null
- return null;
}
- return dbName;
- }
- // get Table Name
- static public String getTableName(Hive db, ASTNode ast)
- throws SemanticException {
- String tableName = null;
- String fullyQualifiedName = getFullyQualifiedName(ast);
-
- // assume the first component of DOT delimited name is tableName
- String attemptTableName = getAttemptTableName(db, fullyQualifiedName, false);
- if (attemptTableName != null) {
- return attemptTableName;
- }
-
- // if the name does not start with table
- // it should start with database
- // DESCRIBE database.table
- // DESCRIBE database.table column
- if (fullyQualifiedName.split(delimiter).length == 3) {
- // if DESCRIBE database.table.column
- // invalid syntax exception
- if (ast.getChildCount() == 2) {
- throw new SemanticException(ErrorMsg.INVALID_TABLE_OR_COLUMN.getMsg(fullyQualifiedName));
+ if (columnNode != null) {
+ if (dbName == null) {
+ return tableName + "." + QualifiedNameUtil.getFullyQualifiedName(columnNode);
} else {
- // if DESCRIBE database.table column
- // return database.table as tableName
- tableName = fullyQualifiedName.substring(0,
- fullyQualifiedName.lastIndexOf('.'));
+ return tableName.substring(dbName.length() + 1, tableName.length()) + "." +
+ QualifiedNameUtil.getFullyQualifiedName(columnNode);
}
- } else if (fullyQualifiedName.split(delimiter).length == 2) {
- // if DESCRIBE database.table
- // return database.table as tableName
- tableName = fullyQualifiedName;
} else {
- // if fullyQualifiedName only have one component
- // it is an invalid table
- throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(fullyQualifiedName));
+ return tableName;
}
-
- return tableName;
}
- // get column path
- static public String getColPath(
- Hive db,
- ASTNode parentAst,
- ASTNode ast,
- String tableName,
- Map<String, String> partSpec) throws SemanticException {
-
- // if parent has two children
- // it could be DESCRIBE table key
- // or DESCRIBE table partition
- if (parentAst.getChildCount() == 2 && partSpec == null) {
- // if partitionSpec is null
- // it is DESCRIBE table key
- // return table as columnPath
- return getFullyQualifiedName(parentAst);
+ // get partition metadata
+ static public Map<String, String> getPartitionSpec(Hive db, ASTNode ast, String tableName)
+ throws SemanticException {
+ ASTNode partNode = null;
+ // if this ast has only one child, then no partition spec specified.
+ if (ast.getChildCount() == 1) {
+ return null;
}
- // assume the first component of DOT delimited name is tableName
- String attemptTableName = getAttemptTableName(db, tableName, true);
- if (attemptTableName != null) {
- return attemptTableName;
+ // if ast has two children
+ // the 2nd child could be partition spec or columnName
+ // if the ast has 3 children, the second *has to* be partition spec
+ if (ast.getChildCount() > 2 && (((ASTNode) ast.getChild(1)).getType() != HiveParser.TOK_PARTSPEC)) {
+ throw new SemanticException(((ASTNode) ast.getChild(1)).getType() + " is not a partition specification");
}
- // if the name does not start with table
- // it should start with database
- // DESCRIBE database.table
- // DESCRIBE database.table column
- if (tableName.split(delimiter).length == 3) {
- // if DESCRIBE database.table column
- // return table.column as column path
- return tableName.substring(
- tableName.indexOf(".") + 1, tableName.length());
+ if (((ASTNode) ast.getChild(1)).getType() == HiveParser.TOK_PARTSPEC) {
+ partNode = (ASTNode) ast.getChild(1);
}
- // in other cases, column path is the same as tableName
- return tableName;
- }
-
- // get partition metadata
- static public Map<String, String> getPartitionSpec(Hive db, ASTNode ast, String tableName)
- throws SemanticException {
- // if ast has two children
- // it could be DESCRIBE table key
- // or DESCRIBE table partition
- // check whether it is DESCRIBE table partition
- if (ast.getChildCount() == 2) {
+ if (partNode != null) {
Table tab = null;
try {
tab = db.getTable(tableName);
@@ -1880,7 +1787,6 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
throw new SemanticException(e.getMessage(), e);
}
- ASTNode partNode = (ASTNode) ast.getChild(1);
HashMap<String, String> partSpec = null;
try {
partSpec = getValidatedPartSpec(tab, partNode, db.getConf(), false);
@@ -1961,21 +1867,49 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
}
}
+ /**
+ * A query like this will generate a tree as follows
+ * "describe formatted default.maptable partition (b=100) id;"
+ * TOK_TABTYPE
+ * TOK_TABNAME --> root for tablename, 2 child nodes mean DB specified
+ * default
+ * maptable
+ * TOK_PARTSPEC --> root node for partition spec. else columnName
+ * TOK_PARTVAL
+ * b
+ * 100
+ * id --> root node for columnName
+ * formatted
+ */
private void analyzeDescribeTable(ASTNode ast) throws SemanticException {
ASTNode tableTypeExpr = (ASTNode) ast.getChild(0);
- String qualifiedName =
- QualifiedNameUtil.getFullyQualifiedName((ASTNode) tableTypeExpr.getChild(0));
- String tableName =
- QualifiedNameUtil.getTableName(db, (ASTNode)(tableTypeExpr.getChild(0)));
- String dbName =
- QualifiedNameUtil.getDBName(db, (ASTNode)(tableTypeExpr.getChild(0)));
+ String dbName = null;
+ String tableName = null;
+ String colPath = null;
+ Map<String, String> partSpec = null;
+
+ ASTNode tableNode = null;
+
+ // process the first node to extract tablename
+ // tablename is either TABLENAME or DBNAME.TABLENAME if db is given
+ if (((ASTNode) tableTypeExpr.getChild(0)).getType() == HiveParser.TOK_TABNAME) {
+ tableNode = (ASTNode) tableTypeExpr.getChild(0);
+ if (tableNode.getChildCount() == 1) {
+ tableName = ((ASTNode) tableNode.getChild(0)).getText();
+ } else {
+ dbName = ((ASTNode) tableNode.getChild(0)).getText();
+ tableName = dbName + "." + ((ASTNode) tableNode.getChild(1)).getText();
+ }
+ } else {
+ throw new SemanticException(((ASTNode) tableTypeExpr.getChild(0)).getText() + " is not an expected token type");
+ }
- Map<String, String> partSpec =
- QualifiedNameUtil.getPartitionSpec(db, tableTypeExpr, tableName);
+ // process the second child node, if it exists, to get the partition spec(s)
+ partSpec = QualifiedNameUtil.getPartitionSpec(db, tableTypeExpr, tableName);
- String colPath = QualifiedNameUtil.getColPath(
- db, tableTypeExpr, (ASTNode) tableTypeExpr.getChild(0), qualifiedName, partSpec);
+ // process the third child node, if it exists, to get the column path
+ colPath = QualifiedNameUtil.getColPath(db, tableTypeExpr, dbName, tableName, partSpec);
// if database is not the one currently using
// validate database
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index 009e72e..f6ea2a3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -1324,7 +1324,8 @@ fileFormat
tabTypeExpr
@init { pushMsg("specifying table types", state); }
@after { popMsg(state); }
- : identifier (DOT^
+ : identifier (DOT^ identifier)?
+ (identifier (DOT^
(
(KW_ELEM_TYPE) => KW_ELEM_TYPE
|
@@ -1332,7 +1333,8 @@ tabTypeExpr
|
(KW_VALUE_TYPE) => KW_VALUE_TYPE
| identifier
- ))* identifier?
+ ))*
+ )?
;
partTypeExpr
@@ -1341,6 +1343,12 @@ partTypeExpr
: tabTypeExpr partitionSpec? -> ^(TOK_TABTYPE tabTypeExpr partitionSpec?)
;
+tabPartColTypeExpr
+@init { pushMsg("specifying table partitions columnName", state); }
+@after { popMsg(state); }
+ : tableName partitionSpec? extColumnName? -> ^(TOK_TABTYPE tableName partitionSpec? extColumnName?)
+ ;
+
descStatement
@init { pushMsg("describe statement", state); }
@after { popMsg(state); }
@@ -1351,9 +1359,9 @@ descStatement
|
(KW_FUNCTION) => KW_FUNCTION KW_EXTENDED? (name=descFuncNames) -> ^(TOK_DESCFUNCTION $name KW_EXTENDED?)
|
- (KW_FORMATTED|KW_EXTENDED|KW_PRETTY) => ((descOptions=KW_FORMATTED|descOptions=KW_EXTENDED|descOptions=KW_PRETTY) parttype=partTypeExpr) -> ^(TOK_DESCTABLE $parttype $descOptions)
+ (KW_FORMATTED|KW_EXTENDED|KW_PRETTY) => ((descOptions=KW_FORMATTED|descOptions=KW_EXTENDED|descOptions=KW_PRETTY) parttype=tabPartColTypeExpr) -> ^(TOK_DESCTABLE $parttype $descOptions)
|
- parttype=partTypeExpr -> ^(TOK_DESCTABLE $parttype)
+ parttype=tabPartColTypeExpr -> ^(TOK_DESCTABLE $parttype)
)
;
@@ -1934,6 +1942,13 @@ columnName
identifier
;
+extColumnName
+@init { pushMsg("column name for complex types", state); }
+@after { popMsg(state); }
+ :
+ identifier (DOT^ ((KW_ELEM_TYPE) => KW_ELEM_TYPE | (KW_KEY_TYPE) => KW_KEY_TYPE | (KW_VALUE_TYPE) => KW_VALUE_TYPE | identifier))*
+ ;
+
columnNameOrderList
@init { pushMsg("column name order list", state); }
@after { popMsg(state); }
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientnegative/desc_failure4.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/desc_failure4.q b/ql/src/test/queries/clientnegative/desc_failure4.q
new file mode 100644
index 0000000..dbbc8ef
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/desc_failure4.q
@@ -0,0 +1,5 @@
+CREATE DATABASE IF NOT EXISTS db1;
+use db1;
+CREATE TABLE IF NOT EXISTS name1 (col1 string);
+
+DESCRIBE name1.col1;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientnegative/describe_xpath1.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/describe_xpath1.q b/ql/src/test/queries/clientnegative/describe_xpath1.q
index ea72f83..84a848e 100644
--- a/ql/src/test/queries/clientnegative/describe_xpath1.q
+++ b/ql/src/test/queries/clientnegative/describe_xpath1.q
@@ -1 +1 @@
-describe src_thrift.$elem$;
+describe src_thrift $elem$;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientnegative/describe_xpath2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/describe_xpath2.q b/ql/src/test/queries/clientnegative/describe_xpath2.q
index f1fee1a..e223939 100644
--- a/ql/src/test/queries/clientnegative/describe_xpath2.q
+++ b/ql/src/test/queries/clientnegative/describe_xpath2.q
@@ -1 +1 @@
-describe src_thrift.$key$;
+describe src_thrift $key$;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientnegative/describe_xpath3.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/describe_xpath3.q b/ql/src/test/queries/clientnegative/describe_xpath3.q
index 4a11f68..8400b9c 100644
--- a/ql/src/test/queries/clientnegative/describe_xpath3.q
+++ b/ql/src/test/queries/clientnegative/describe_xpath3.q
@@ -1 +1 @@
-describe src_thrift.lint.abc;
+describe src_thrift lint.abc;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientnegative/describe_xpath4.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/describe_xpath4.q b/ql/src/test/queries/clientnegative/describe_xpath4.q
index 0912bf1..27d3fea 100644
--- a/ql/src/test/queries/clientnegative/describe_xpath4.q
+++ b/ql/src/test/queries/clientnegative/describe_xpath4.q
@@ -1 +1 @@
-describe src_thrift.mStringString.abc;
+describe src_thrift mStringString.abc;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientpositive/alter_partition_update_status.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/alter_partition_update_status.q b/ql/src/test/queries/clientpositive/alter_partition_update_status.q
index 0d9c176..f8bf2a2 100644
--- a/ql/src/test/queries/clientpositive/alter_partition_update_status.q
+++ b/ql/src/test/queries/clientpositive/alter_partition_update_status.q
@@ -5,11 +5,11 @@ insert overwrite table src_stat_part_one partition (partitionId=1)
ANALYZE TABLE src_stat_part_one PARTITION(partitionId=1) COMPUTE STATISTICS for columns;
-describe formatted src_stat_part_one.key PARTITION(partitionId=1);
+describe formatted src_stat_part_one PARTITION(partitionId=1) key;
ALTER TABLE src_stat_part_one PARTITION(partitionId=1) UPDATE STATISTICS for column key SET ('numDVs'='11','avgColLen'='2.2');
-describe formatted src_stat_part_one.key PARTITION(partitionId=1);
+describe formatted src_stat_part_one PARTITION(partitionId=1) key;
create table src_stat_part_two(key string, value string) partitioned by (px int, py string);
@@ -18,11 +18,11 @@ insert overwrite table src_stat_part_two partition (px=1, py='a')
ANALYZE TABLE src_stat_part_two PARTITION(px=1) COMPUTE STATISTICS for columns;
-describe formatted src_stat_part_two.key PARTITION(px=1, py='a');
+describe formatted src_stat_part_two PARTITION(px=1, py='a') key;
ALTER TABLE src_stat_part_two PARTITION(px=1, py='a') UPDATE STATISTICS for column key SET ('numDVs'='30','maxColLen'='40');
-describe formatted src_stat_part_two.key PARTITION(px=1, py='a');
+describe formatted src_stat_part_two PARTITION(px=1, py='a') key;
create database if not exists dummydb;
@@ -30,8 +30,8 @@ use dummydb;
ALTER TABLE default.src_stat_part_two PARTITION(px=1, py='a') UPDATE STATISTICS for column key SET ('numDVs'='40','maxColLen'='50');
-describe formatted default.src_stat_part_two key PARTITION(px=1, py='a');
+describe formatted default.src_stat_part_two PARTITION(px=1, py='a') key;
use default;
-drop database dummydb;
\ No newline at end of file
+drop database dummydb;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientpositive/alter_table_invalidate_column_stats.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/alter_table_invalidate_column_stats.q b/ql/src/test/queries/clientpositive/alter_table_invalidate_column_stats.q
index 0a0d649..de966e8 100644
--- a/ql/src/test/queries/clientpositive/alter_table_invalidate_column_stats.q
+++ b/ql/src/test/queries/clientpositive/alter_table_invalidate_column_stats.q
@@ -35,32 +35,32 @@ alter table statsdb1.testtable1 rename to statsdb2.testtable2;
analyze table testpart1 compute statistics for columns;
-describe formatted statsdb1.testpart1 col1 partition (part = 'part1');
-describe formatted statsdb1.testpart1 col2 partition (part = 'part1');
-describe formatted statsdb1.testpart1 col3 partition (part = 'part1');
-describe formatted statsdb1.testpart1 col1 partition (part = 'part2');
-describe formatted statsdb1.testpart1 col2 partition (part = 'part2');
-describe formatted statsdb1.testpart1 col3 partition (part = 'part2');
+describe formatted statsdb1.testpart1 partition (part = 'part1') col1;
+describe formatted statsdb1.testpart1 partition (part = 'part1') col2;
+describe formatted statsdb1.testpart1 partition (part = 'part1') col3;
+describe formatted statsdb1.testpart1 partition (part = 'part2') col1;
+describe formatted statsdb1.testpart1 partition (part = 'part2') col2;
+describe formatted statsdb1.testpart1 partition (part = 'part2') col3;
alter table statsdb1.testpart1 partition (part = 'part2') rename to partition (part = 'part3');
-describe formatted statsdb1.testpart1 col1 partition (part = 'part1');
-describe formatted statsdb1.testpart1 col2 partition (part = 'part1');
-describe formatted statsdb1.testpart1 col3 partition (part = 'part1');
-describe formatted statsdb1.testpart1 col1 partition (part = 'part3');
-describe formatted statsdb1.testpart1 col2 partition (part = 'part3');
-describe formatted statsdb1.testpart1 col3 partition (part = 'part3');
+describe formatted statsdb1.testpart1 partition (part = 'part1') col1;
+describe formatted statsdb1.testpart1 partition (part = 'part1') col2;
+describe formatted statsdb1.testpart1 partition (part = 'part1') col3;
+describe formatted statsdb1.testpart1 partition (part = 'part3') col1;
+describe formatted statsdb1.testpart1 partition (part = 'part3') col2;
+describe formatted statsdb1.testpart1 partition (part = 'part3') col3;
alter table statsdb1.testpart1 replace columns (col1 int, col2 string, col4 string) cascade;
-describe formatted statsdb1.testpart1 col1 partition (part = 'part1');
-describe formatted statsdb1.testpart1 col2 partition (part = 'part1');
-describe formatted statsdb1.testpart1 col4 partition (part = 'part1');
+describe formatted statsdb1.testpart1 partition (part = 'part1') col1;
+describe formatted statsdb1.testpart1 partition (part = 'part1') col2;
+describe formatted statsdb1.testpart1 partition (part = 'part1') col4;
alter table statsdb1.testpart1 change column col1 col1 string;
set hive.exec.dynamic.partition = true;
alter table statsdb1.testpart1 partition (part) change column col1 col1 string;
-describe formatted statsdb1.testpart1 col1 partition (part = 'part1');
-describe formatted statsdb1.testpart1 col2 partition (part = 'part1');
-describe formatted statsdb1.testpart1 col4 partition (part = 'part1');
+describe formatted statsdb1.testpart1 partition (part = 'part1') col1;
+describe formatted statsdb1.testpart1 partition (part = 'part1') col2;
+describe formatted statsdb1.testpart1 partition (part = 'part1') col4;
alter table statsdb1.testpart1 rename to statsdb2.testpart2;
use statsdb2;
@@ -112,32 +112,32 @@ alter table statsdb1.testtable1 rename to statsdb2.testtable2;
analyze table testpart1 compute statistics for columns;
-describe formatted statsdb1.testpart1 col1 partition (part = 'part1');
-describe formatted statsdb1.testpart1 col2 partition (part = 'part1');
-describe formatted statsdb1.testpart1 col3 partition (part = 'part1');
-describe formatted statsdb1.testpart1 col1 partition (part = 'part2');
-describe formatted statsdb1.testpart1 col2 partition (part = 'part2');
-describe formatted statsdb1.testpart1 col3 partition (part = 'part2');
+describe formatted statsdb1.testpart1 partition (part = 'part1') col1;
+describe formatted statsdb1.testpart1 partition (part = 'part1') col2;
+describe formatted statsdb1.testpart1 partition (part = 'part1') col3;
+describe formatted statsdb1.testpart1 partition (part = 'part2') col1;
+describe formatted statsdb1.testpart1 partition (part = 'part2') col2;
+describe formatted statsdb1.testpart1 partition (part = 'part2') col3;
alter table statsdb1.testpart1 partition (part = 'part2') rename to partition (part = 'part3');
-describe formatted statsdb1.testpart1 col1 partition (part = 'part1');
-describe formatted statsdb1.testpart1 col2 partition (part = 'part1');
-describe formatted statsdb1.testpart1 col3 partition (part = 'part1');
-describe formatted statsdb1.testpart1 col1 partition (part = 'part3');
-describe formatted statsdb1.testpart1 col2 partition (part = 'part3');
-describe formatted statsdb1.testpart1 col3 partition (part = 'part3');
+describe formatted statsdb1.testpart1 partition (part = 'part1') col1;
+describe formatted statsdb1.testpart1 partition (part = 'part1') col2;
+describe formatted statsdb1.testpart1 partition (part = 'part1') col3;
+describe formatted statsdb1.testpart1 partition (part = 'part3') col1;
+describe formatted statsdb1.testpart1 partition (part = 'part3') col2;
+describe formatted statsdb1.testpart1 partition (part = 'part3') col3;
alter table statsdb1.testpart1 replace columns (col1 int, col2 string, col4 string) cascade;
-describe formatted statsdb1.testpart1 col1 partition (part = 'part1');
-describe formatted statsdb1.testpart1 col2 partition (part = 'part1');
-describe formatted statsdb1.testpart1 col4 partition (part = 'part1');
+describe formatted statsdb1.testpart1 partition (part = 'part1') col1;
+describe formatted statsdb1.testpart1 partition (part = 'part1') col2;
+describe formatted statsdb1.testpart1 partition (part = 'part1') col4;
alter table statsdb1.testpart1 change column col1 col1 string;
set hive.exec.dynamic.partition = true;
alter table statsdb1.testpart1 partition (part) change column col1 col1 string;
-describe formatted statsdb1.testpart1 col1 partition (part = 'part1');
-describe formatted statsdb1.testpart1 col2 partition (part = 'part1');
-describe formatted statsdb1.testpart1 col4 partition (part = 'part1');
+describe formatted statsdb1.testpart1 partition (part = 'part1') col1;
+describe formatted statsdb1.testpart1 partition (part = 'part1') col2;
+describe formatted statsdb1.testpart1 partition (part = 'part1') col4;
alter table statsdb1.testpart1 rename to statsdb2.testpart2;
use statsdb2;
@@ -149,4 +149,4 @@ drop table statsdb2.testtable2;
use default;
drop database statsdb1;
-drop database statsdb2;
\ No newline at end of file
+drop database statsdb2;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientpositive/alter_table_update_status.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/alter_table_update_status.q b/ql/src/test/queries/clientpositive/alter_table_update_status.q
index 2f0a9b3..18cf758 100644
--- a/ql/src/test/queries/clientpositive/alter_table_update_status.q
+++ b/ql/src/test/queries/clientpositive/alter_table_update_status.q
@@ -9,23 +9,23 @@ LOAD DATA LOCAL INPATH '../../data/files/kv3.txt' INTO TABLE src_stat_int;
ANALYZE TABLE src_stat COMPUTE STATISTICS for columns key;
-describe formatted src_stat.key;
+describe formatted src_stat key;
ALTER TABLE src_stat UPDATE STATISTICS for column key SET ('numDVs'='1111','avgColLen'='1.111');
-describe formatted src_stat.key;
+describe formatted src_stat key;
ALTER TABLE src_stat UPDATE STATISTICS for column value SET ('numDVs'='121','numNulls'='122','avgColLen'='1.23','maxColLen'='124');
-describe formatted src_stat.value;
+describe formatted src_stat value;
ANALYZE TABLE src_stat_int COMPUTE STATISTICS for columns key;
-describe formatted src_stat_int.key;
+describe formatted src_stat_int key;
ALTER TABLE src_stat_int UPDATE STATISTICS for column key SET ('numDVs'='2222','lowValue'='333.22','highValue'='22.22');
-describe formatted src_stat_int.key;
+describe formatted src_stat_int key;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientpositive/analyze_tbl_part.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/analyze_tbl_part.q b/ql/src/test/queries/clientpositive/analyze_tbl_part.q
index ecf1389..179c584 100644
--- a/ql/src/test/queries/clientpositive/analyze_tbl_part.q
+++ b/ql/src/test/queries/clientpositive/analyze_tbl_part.q
@@ -8,13 +8,13 @@ select * from src1;
ANALYZE TABLE src_stat_part partition (partitionId) COMPUTE STATISTICS for columns key;
-describe formatted src_stat_part.key PARTITION(partitionId=1);
+describe formatted src_stat_part PARTITION(partitionId=1) key;
ANALYZE TABLE src_stat_part partition (partitionId) COMPUTE STATISTICS for columns key, value;
-describe formatted src_stat_part.key PARTITION(partitionId=1);
+describe formatted src_stat_part PARTITION(partitionId=1) key;
-describe formatted src_stat_part.value PARTITION(partitionId=2);
+describe formatted src_stat_part PARTITION(partitionId=2) value;
create table src_stat_string_part(key string, value string) partitioned by (partitionName string);
@@ -26,4 +26,4 @@ select * from src1;
ANALYZE TABLE src_stat_string_part partition (partitionName="p'1") COMPUTE STATISTICS for columns key, value;
-ANALYZE TABLE src_stat_string_part partition (partitionName="p\"1") COMPUTE STATISTICS for columns key, value;
\ No newline at end of file
+ANALYZE TABLE src_stat_string_part partition (partitionName="p\"1") COMPUTE STATISTICS for columns key, value;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientpositive/colstats_all_nulls.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/colstats_all_nulls.q b/ql/src/test/queries/clientpositive/colstats_all_nulls.q
index c060ace..ae6b0f5 100644
--- a/ql/src/test/queries/clientpositive/colstats_all_nulls.q
+++ b/ql/src/test/queries/clientpositive/colstats_all_nulls.q
@@ -4,8 +4,8 @@ LOAD DATA LOCAL INPATH '../../data/files/nulls.txt' INTO TABLE src_null;
create table all_nulls as SELECT a, cast(a as double) as b, cast(a as decimal) as c FROM src_null where a is null limit 5;
analyze table all_nulls compute statistics for columns;
-describe formatted all_nulls.a;
-describe formatted all_nulls.b;
+describe formatted all_nulls a;
+describe formatted all_nulls b;
drop table all_nulls;
drop table src_null;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientpositive/columnstats_part_coltype.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/columnstats_part_coltype.q b/ql/src/test/queries/clientpositive/columnstats_part_coltype.q
index cd191c0..ac4c370 100644
--- a/ql/src/test/queries/clientpositive/columnstats_part_coltype.q
+++ b/ql/src/test/queries/clientpositive/columnstats_part_coltype.q
@@ -9,33 +9,33 @@ insert into partcolstats partition (ds=date '2015-04-03', hr=3, part='partA') se
insert into partcolstats partition (ds=date '2015-04-03', hr=3, part='partB') select key, value from src limit 60;
analyze table partcolstats partition (ds=date '2015-04-02', hr=2, part='partA') compute statistics for columns;
-describe formatted partcolstats.key partition (ds=date '2015-04-02', hr=2, part='partA');
-describe formatted partcolstats.value partition (ds=date '2015-04-02', hr=2, part='partA');
+describe formatted partcolstats partition (ds=date '2015-04-02', hr=2, part='partA') key;
+describe formatted partcolstats partition (ds=date '2015-04-02', hr=2, part='partA') value;
-describe formatted partcolstats.key partition (ds=date '2015-04-02', hr=2, part='partB');
-describe formatted partcolstats.value partition (ds=date '2015-04-02', hr=2, part='partB');
+describe formatted partcolstats partition (ds=date '2015-04-02', hr=2, part='partB') key;
+describe formatted partcolstats partition (ds=date '2015-04-02', hr=2, part='partB') value;
analyze table partcolstats partition (ds=date '2015-04-02', hr=2, part) compute statistics for columns;
-describe formatted partcolstats.key partition (ds=date '2015-04-02', hr=2, part='partB');
-describe formatted partcolstats.value partition (ds=date '2015-04-02', hr=2, part='partB');
+describe formatted partcolstats partition (ds=date '2015-04-02', hr=2, part='partB') key;
+describe formatted partcolstats partition (ds=date '2015-04-02', hr=2, part='partB') value;
-describe formatted partcolstats.key partition (ds=date '2015-04-02', hr=3, part='partA');
-describe formatted partcolstats.value partition (ds=date '2015-04-02', hr=3, part='partA');
+describe formatted partcolstats partition (ds=date '2015-04-02', hr=3, part='partA') key;
+describe formatted partcolstats partition (ds=date '2015-04-02', hr=3, part='partA') value;
analyze table partcolstats partition (ds=date '2015-04-02', hr, part) compute statistics for columns;
-describe formatted partcolstats.key partition (ds=date '2015-04-02', hr=3, part='partA');
-describe formatted partcolstats.value partition (ds=date '2015-04-02', hr=3, part='partA');
+describe formatted partcolstats partition (ds=date '2015-04-02', hr=3, part='partA') key;
+describe formatted partcolstats partition (ds=date '2015-04-02', hr=3, part='partA') value;
-describe formatted partcolstats.key partition (ds=date '2015-04-03', hr=3, part='partA');
-describe formatted partcolstats.value partition (ds=date '2015-04-03', hr=3, part='partA');
-describe formatted partcolstats.key partition (ds=date '2015-04-03', hr=3, part='partB');
-describe formatted partcolstats.value partition (ds=date '2015-04-03', hr=3, part='partB');
+describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partA') key;
+describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partA') value;
+describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partB') key;
+describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partB') value;
analyze table partcolstats partition (ds, hr, part) compute statistics for columns;
-describe formatted partcolstats.key partition (ds=date '2015-04-03', hr=3, part='partA');
-describe formatted partcolstats.value partition (ds=date '2015-04-03', hr=3, part='partA');
-describe formatted partcolstats.key partition (ds=date '2015-04-03', hr=3, part='partB');
-describe formatted partcolstats.value partition (ds=date '2015-04-03', hr=3, part='partB');
+describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partA') key;
+describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partA') value;
+describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partB') key;
+describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partB') value;
drop table partcolstats;
@@ -45,7 +45,7 @@ create table partcolstatsnum (key int, value string) partitioned by (tint tinyin
insert into partcolstatsnum partition (tint=100, sint=1000, bint=1000000) select key, value from src limit 30;
analyze table partcolstatsnum partition (tint=100, sint=1000, bint=1000000) compute statistics for columns;
-describe formatted partcolstatsnum.value partition (tint=100, sint=1000, bint=1000000);
+describe formatted partcolstatsnum partition (tint=100, sint=1000, bint=1000000) value;
drop table partcolstatsnum;
@@ -55,7 +55,7 @@ create table partcolstatsdec (key int, value string) partitioned by (decpart dec
insert into partcolstatsdec partition (decpart='1000.0001') select key, value from src limit 30;
analyze table partcolstatsdec partition (decpart='1000.0001') compute statistics for columns;
-describe formatted partcolstatsdec.value partition (decpart='1000.0001');
+describe formatted partcolstatsdec partition (decpart='1000.0001') value;
drop table partcolstatsdec;
@@ -65,7 +65,7 @@ create table partcolstatschar (key int, value string) partitioned by (varpart va
insert into partcolstatschar partition (varpart='part1', charpart='aaa') select key, value from src limit 30;
analyze table partcolstatschar partition (varpart='part1', charpart='aaa') compute statistics for columns;
-describe formatted partcolstatschar.value partition (varpart='part1', charpart='aaa');
+describe formatted partcolstatschar partition (varpart='part1', charpart='aaa') value;
drop table partcolstatschar;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientpositive/columnstats_partlvl.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/columnstats_partlvl.q b/ql/src/test/queries/clientpositive/columnstats_partlvl.q
index bd41005..0947a83 100644
--- a/ql/src/test/queries/clientpositive/columnstats_partlvl.q
+++ b/ql/src/test/queries/clientpositive/columnstats_partlvl.q
@@ -23,22 +23,22 @@ explain
analyze table Employee_Part partition (employeeSalary=2000.0) compute statistics for columns;
analyze table Employee_Part partition (employeeSalary=2000.0) compute statistics for columns;
-describe formatted Employee_Part.employeeID partition (employeeSalary=2000.0);
-describe formatted Employee_Part.employeeName partition (employeeSalary=2000.0);
+describe formatted Employee_Part partition (employeeSalary=2000.0) employeeID;
+describe formatted Employee_Part partition (employeeSalary=2000.0) employeeName;
explain
analyze table Employee_Part compute statistics for columns;
analyze table Employee_Part compute statistics for columns;
-describe formatted Employee_Part.employeeID partition(employeeSalary=2000.0);
-describe formatted Employee_Part.employeeID partition(employeeSalary=4000.0);
+describe formatted Employee_Part partition(employeeSalary=2000.0) employeeID;
+describe formatted Employee_Part partition(employeeSalary=4000.0) employeeID;
set hive.analyze.stmt.collect.partlevel.stats=false;
explain
analyze table Employee_Part compute statistics for columns;
analyze table Employee_Part compute statistics for columns;
-describe formatted Employee_Part.employeeID;
+describe formatted Employee_Part employeeID;
set hive.analyze.stmt.collect.partlevel.stats=true;
@@ -48,7 +48,7 @@ use dummydb;
analyze table default.Employee_Part partition (employeeSalary=2000.0) compute statistics for columns;
-describe formatted default.Employee_Part employeeID partition (employeeSalary=2000.0);
+describe formatted default.Employee_Part partition (employeeSalary=2000.0) employeeID;
analyze table default.Employee_Part compute statistics for columns;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientpositive/columnstats_partlvl_dp.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/columnstats_partlvl_dp.q b/ql/src/test/queries/clientpositive/columnstats_partlvl_dp.q
index 73739b3..53b691c 100644
--- a/ql/src/test/queries/clientpositive/columnstats_partlvl_dp.q
+++ b/ql/src/test/queries/clientpositive/columnstats_partlvl_dp.q
@@ -15,26 +15,26 @@ explain
analyze table Employee_Part partition (employeeSalary='4000.0', country) compute statistics for columns employeeName, employeeID;
analyze table Employee_Part partition (employeeSalary='4000.0', country) compute statistics for columns employeeName, employeeID;
-describe formatted Employee_Part.employeeName partition (employeeSalary='4000.0', country='USA');
+describe formatted Employee_Part partition (employeeSalary='4000.0', country='USA') employeeName;
-- don't specify all partitioning keys
explain
analyze table Employee_Part partition (employeeSalary='2000.0') compute statistics for columns employeeID;
analyze table Employee_Part partition (employeeSalary='2000.0') compute statistics for columns employeeID;
-describe formatted Employee_Part.employeeID partition (employeeSalary='2000.0', country='USA');
-describe formatted Employee_Part.employeeID partition (employeeSalary='2000.0', country='UK');
+describe formatted Employee_Part partition (employeeSalary='2000.0', country='USA') employeeID;
+describe formatted Employee_Part partition (employeeSalary='2000.0', country='UK') employeeID;
-- don't specify any partitioning keys
explain
analyze table Employee_Part partition (employeeSalary) compute statistics for columns employeeID;
analyze table Employee_Part partition (employeeSalary) compute statistics for columns employeeID;
-describe formatted Employee_Part.employeeID partition (employeeSalary='3000.0', country='UK');
+describe formatted Employee_Part partition (employeeSalary='3000.0', country='UK') employeeID;
explain
analyze table Employee_Part partition (employeeSalary,country) compute statistics for columns;
analyze table Employee_Part partition (employeeSalary,country) compute statistics for columns;
-describe formatted Employee_Part.employeeName partition (employeeSalary='3500.0', country='UK');
+describe formatted Employee_Part partition (employeeSalary='3500.0', country='UK') employeeName;
-- partially populated stats
drop table Employee;
@@ -48,13 +48,13 @@ LOAD DATA LOCAL INPATH "../../data/files/employee.dat" INTO TABLE Employee part
analyze table Employee partition (employeeSalary,country) compute statistics for columns;
-describe formatted Employee.employeeName partition (employeeSalary='3500.0', country='UK');
+describe formatted Employee partition (employeeSalary='3500.0', country='UK') employeeName;
LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee partition(employeeSalary='3000.0', country='USA');
LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee partition(employeeSalary='4000.0', country='USA');
analyze table Employee partition (employeeSalary) compute statistics for columns;
-describe formatted Employee.employeeName partition (employeeSalary='3000.0', country='USA');
+describe formatted Employee partition (employeeSalary='3000.0', country='USA') employeeName;
-- add columns
alter table Employee add columns (c int ,d string);
@@ -63,7 +63,7 @@ LOAD DATA LOCAL INPATH "../../data/files/employee_part.txt" INTO TABLE Employee
analyze table Employee partition (employeeSalary='6000.0',country='UK') compute statistics for columns;
-describe formatted Employee.employeeName partition (employeeSalary='6000.0', country='UK');
-describe formatted Employee.c partition (employeeSalary='6000.0', country='UK');
-describe formatted Employee.d partition (employeeSalary='6000.0', country='UK');
+describe formatted Employee partition (employeeSalary='6000.0', country='UK') employeeName;
+describe formatted Employee partition (employeeSalary='6000.0', country='UK') c;
+describe formatted Employee partition (employeeSalary='6000.0', country='UK') d;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientpositive/compustat_avro.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/compustat_avro.q b/ql/src/test/queries/clientpositive/compustat_avro.q
index 4d781a3..8bf3344 100644
--- a/ql/src/test/queries/clientpositive/compustat_avro.q
+++ b/ql/src/test/queries/clientpositive/compustat_avro.q
@@ -14,8 +14,8 @@ create table testAvro
'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat'
TBLPROPERTIES ('avro.schema.url'='${system:test.tmp.dir}/grad.avsc');
-describe formatted testAvro.col1;
+describe formatted testAvro col1;
analyze table testAvro compute statistics for columns col1,col3;
-describe formatted testAvro.col1;
+describe formatted testAvro col1;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientpositive/confirm_initial_tbl_stats.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/confirm_initial_tbl_stats.q b/ql/src/test/queries/clientpositive/confirm_initial_tbl_stats.q
index 6f7bb53..6b094f7 100644
--- a/ql/src/test/queries/clientpositive/confirm_initial_tbl_stats.q
+++ b/ql/src/test/queries/clientpositive/confirm_initial_tbl_stats.q
@@ -1,40 +1,40 @@
describe extended src;
-describe formatted src.key;
+describe formatted src key;
describe extended src1;
-describe formatted src1.value;
+describe formatted src1 value;
describe extended src_json;
-describe formatted src_json.json;
+describe formatted src_json json;
describe extended src_sequencefile;
-describe formatted src_sequencefile.value;
+describe formatted src_sequencefile value;
describe extended srcbucket;
-describe formatted srcbucket.value;
+describe formatted srcbucket value;
describe extended srcbucket2;
-describe formatted srcbucket2.value;
+describe formatted srcbucket2 value;
describe extended srcpart;
-describe formatted srcpart.key PARTITION (ds="2008-04-09", hr="12");
+describe formatted srcpart PARTITION (ds="2008-04-09", hr="12") key;
describe extended alltypesorc;
-describe formatted alltypesorc.ctinyint;
+describe formatted alltypesorc ctinyint;
-describe formatted alltypesorc.cfloat;
+describe formatted alltypesorc cfloat;
-describe formatted alltypesorc.ctimestamp1;
+describe formatted alltypesorc ctimestamp1;
-describe formatted alltypesorc.cboolean2;
+describe formatted alltypesorc cboolean2;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientpositive/describe_syntax.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/describe_syntax.q b/ql/src/test/queries/clientpositive/describe_syntax.q
index c9255b5..9659e9a 100644
--- a/ql/src/test/queries/clientpositive/describe_syntax.q
+++ b/ql/src/test/queries/clientpositive/describe_syntax.q
@@ -28,12 +28,10 @@ DESCRIBE db1.t1 key1;
DESCRIBE EXTENDED db1.t1 key1;
DESCRIBE FORMATTED db1.t1 key1;
--- describe table.column
--- after first checking t1.key1 for database.table not valid
--- fall back to the old syntax table.column
-DESCRIBE t1.key1;
-DESCRIBE EXTENDED t1.key1;
-DESCRIBE FORMATTED t1.key1;
+-- describe table column
+DESCRIBE t1 key1;
+DESCRIBE EXTENDED t1 key1;
+DESCRIBE FORMATTED t1 key1;
-- describe table partition
DESCRIBE t1 PARTITION(ds='4', part='5');
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientpositive/describe_table.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/describe_table.q b/ql/src/test/queries/clientpositive/describe_table.q
index aa1f574..c388207 100644
--- a/ql/src/test/queries/clientpositive/describe_table.q
+++ b/ql/src/test/queries/clientpositive/describe_table.q
@@ -1,25 +1,25 @@
describe srcpart;
-describe srcpart.key;
+describe srcpart key;
describe srcpart PARTITION(ds='2008-04-08', hr='12');
describe `srcpart`;
-describe `srcpart`.`key`;
+describe `srcpart` `key`;
describe `srcpart` PARTITION(ds='2008-04-08', hr='12');
describe extended srcpart;
-describe extended srcpart.key;
+describe extended srcpart key;
describe extended srcpart PARTITION(ds='2008-04-08', hr='12');
describe extended `srcpart`;
-describe extended `srcpart`.`key`;
+describe extended `srcpart` `key`;
describe extended `srcpart` PARTITION(ds='2008-04-08', hr='12');
describe formatted srcpart;
-describe formatted srcpart.key;
+describe formatted srcpart key;
describe formatted srcpart PARTITION(ds='2008-04-08', hr='12');
describe formatted `srcpart`;
-describe formatted `srcpart`.`key`;
+describe formatted `srcpart` `key`;
describe formatted `srcpart` PARTITION(ds='2008-04-08', hr='12');
create table srcpart_serdeprops like srcpart;
@@ -29,3 +29,55 @@ alter table srcpart_serdeprops set serdeproperties('abcd'='2');
alter table srcpart_serdeprops set serdeproperties('A1234'='3');
describe formatted srcpart_serdeprops;
drop table srcpart_serdeprops;
+
+CREATE DATABASE IF NOT EXISTS name1;
+CREATE DATABASE IF NOT EXISTS name2;
+use name1;
+CREATE TABLE IF NOT EXISTS name1 (name1 int, name2 string) PARTITIONED BY (name3 int);
+ALTER TABLE name1 ADD PARTITION (name3=1);
+CREATE TABLE IF NOT EXISTS name2 (name3 int, name4 string);
+use name2;
+CREATE TABLE IF NOT EXISTS table1 (col1 int, col2 string);
+
+use default;
+DESCRIBE name1.name1;
+DESCRIBE name1.name1 name2;
+DESCRIBE name1.name1 PARTITION (name3=1);
+DESCRIBE name1.name2;
+DESCRIBE name1.name2 name3;
+DESCRIBE name1.name2 name4;
+
+use name1;
+DESCRIBE name1;
+DESCRIBE name1 name2;
+DESCRIBE name1 PARTITION (name3=1);
+DESCRIBE name1.name1;
+DESCRIBE name1.name1 name2;
+DESCRIBE name1.name1 PARTITION (name3=1);
+DESCRIBE name2;
+DESCRIBE name2 name3;
+DESCRIBE name2 name4;
+DESCRIBE name1.name2;
+DESCRIBE name1.name2 name3;
+DESCRIBE name1.name2 name4;
+
+DESCRIBE name2.table1;
+DESCRIBE name2.table1 col1;
+DESCRIBE name2.table1 col2;
+use name2;
+DESCRIBE table1;
+DESCRIBE table1 col1;
+DESCRIBE table1 col2;
+
+DESCRIBE name2.table1;
+DESCRIBE name2.table1 col1;
+DESCRIBE name2.table1 col2;
+
+DROP TABLE IF EXISTS table1;
+use name1;
+DROP TABLE IF EXISTS name1;
+DROP TABLE IF EXISTS name2;
+use name2;
+DROP TABLE IF EXISTS table1;
+DROP DATABASE IF EXISTS name1;
+DROP DATABASE IF EXISTS name2;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientpositive/describe_xpath.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/describe_xpath.q b/ql/src/test/queries/clientpositive/describe_xpath.q
index 469cbf4..b609f85 100644
--- a/ql/src/test/queries/clientpositive/describe_xpath.q
+++ b/ql/src/test/queries/clientpositive/describe_xpath.q
@@ -1,17 +1,17 @@
-- Describe a list structure in a thrift table
-describe src_thrift.lint;
+describe src_thrift lint;
-- Describe the element of a list
-describe src_thrift.lint.$elem$;
+describe src_thrift lint.$elem$;
-- Describe the key of a map
-describe src_thrift.mStringString.$key$;
+describe src_thrift mStringString.$key$;
-- Describe the value of a map
-describe src_thrift.mStringString.$value$;
+describe src_thrift mStringString.$value$;
-- Describe a complex element of a list
-describe src_thrift.lintString.$elem$;
+describe src_thrift lintString.$elem$;
-- Describe a member of an element of a list
-describe src_thrift.lintString.$elem$.myint;
+describe src_thrift lintString.$elem$.myint;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientpositive/extrapolate_part_stats_full.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/extrapolate_part_stats_full.q b/ql/src/test/queries/clientpositive/extrapolate_part_stats_full.q
index 00c9b53..c4f76b5 100644
--- a/ql/src/test/queries/clientpositive/extrapolate_part_stats_full.q
+++ b/ql/src/test/queries/clientpositive/extrapolate_part_stats_full.q
@@ -23,7 +23,7 @@ analyze table loc_orc_1d partition(year='2000') compute statistics for columns s
analyze table loc_orc_1d partition(year='2001') compute statistics for columns state,locid;
-describe formatted loc_orc_1d.state PARTITION(year='2001');
+describe formatted loc_orc_1d PARTITION(year='2001') state;
-- basicStatState: COMPLETE colStatState: PARTIAL
explain extended select state from loc_orc_1d;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientpositive/extrapolate_part_stats_partial.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/extrapolate_part_stats_partial.q b/ql/src/test/queries/clientpositive/extrapolate_part_stats_partial.q
index 5c062ee..72350e8 100644
--- a/ql/src/test/queries/clientpositive/extrapolate_part_stats_partial.q
+++ b/ql/src/test/queries/clientpositive/extrapolate_part_stats_partial.q
@@ -25,9 +25,9 @@ analyze table loc_orc_1d partition(year='2001') compute statistics for columns s
analyze table loc_orc_1d partition(year='2002') compute statistics for columns state,locid;
-describe formatted loc_orc_1d.state PARTITION(year='2001');
+describe formatted loc_orc_1d PARTITION(year='2001') state;
-describe formatted loc_orc_1d.state PARTITION(year='2002');
+describe formatted loc_orc_1d PARTITION(year='2002') state;
-- basicStatState: COMPLETE colStatState: PARTIAL
explain extended select state from loc_orc_1d;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientpositive/extrapolate_part_stats_partial_ndv.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/extrapolate_part_stats_partial_ndv.q b/ql/src/test/queries/clientpositive/extrapolate_part_stats_partial_ndv.q
index 5f0160a..6e242a8 100644
--- a/ql/src/test/queries/clientpositive/extrapolate_part_stats_partial_ndv.q
+++ b/ql/src/test/queries/clientpositive/extrapolate_part_stats_partial_ndv.q
@@ -32,21 +32,21 @@ analyze table loc_orc_1d partition(year='2001') compute statistics for columns s
analyze table loc_orc_1d partition(year='2002') compute statistics for columns state,locid,cnt,zip;
-describe formatted loc_orc_1d.state PARTITION(year='2001');
+describe formatted loc_orc_1d PARTITION(year='2001') state;
-describe formatted loc_orc_1d.state PARTITION(year='2002');
+describe formatted loc_orc_1d PARTITION(year='2002') state;
-describe formatted loc_orc_1d.locid PARTITION(year='2001');
+describe formatted loc_orc_1d PARTITION(year='2001') locid;
-describe formatted loc_orc_1d.locid PARTITION(year='2002');
+describe formatted loc_orc_1d PARTITION(year='2002') locid;
-describe formatted loc_orc_1d.cnt PARTITION(year='2001');
+describe formatted loc_orc_1d PARTITION(year='2001') cnt;
-describe formatted loc_orc_1d.cnt PARTITION(year='2002');
+describe formatted loc_orc_1d PARTITION(year='2002') cnt;
-describe formatted loc_orc_1d.zip PARTITION(year='2001');
+describe formatted loc_orc_1d PARTITION(year='2001') zip;
-describe formatted loc_orc_1d.zip PARTITION(year='2002');
+describe formatted loc_orc_1d PARTITION(year='2002') zip;
explain extended select state,locid,cnt,zip from loc_orc_1d;
@@ -54,21 +54,21 @@ analyze table loc_orc_1d partition(year='2000') compute statistics for columns s
analyze table loc_orc_1d partition(year='2003') compute statistics for columns state,locid,cnt,zip;
-describe formatted loc_orc_1d.state PARTITION(year='2000');
+describe formatted loc_orc_1d PARTITION(year='2000') state;
-describe formatted loc_orc_1d.state PARTITION(year='2003');
+describe formatted loc_orc_1d PARTITION(year='2003') state;
-describe formatted loc_orc_1d.locid PARTITION(year='2000');
+describe formatted loc_orc_1d PARTITION(year='2000') locid;
-describe formatted loc_orc_1d.locid PARTITION(year='2003');
+describe formatted loc_orc_1d PARTITION(year='2003') locid;
-describe formatted loc_orc_1d.cnt PARTITION(year='2000');
+describe formatted loc_orc_1d PARTITION(year='2000') cnt;
-describe formatted loc_orc_1d.cnt PARTITION(year='2003');
+describe formatted loc_orc_1d PARTITION(year='2003') cnt;
-describe formatted loc_orc_1d.zip PARTITION(year='2000');
+describe formatted loc_orc_1d PARTITION(year='2000') zip;
-describe formatted loc_orc_1d.zip PARTITION(year='2003');
+describe formatted loc_orc_1d PARTITION(year='2003') zip;
explain extended select state,locid,cnt,zip from loc_orc_1d;
@@ -86,16 +86,16 @@ analyze table loc_orc_2d partition(zip=94086, year='2001') compute statistics fo
analyze table loc_orc_2d partition(zip=94087, year='2002') compute statistics for columns state,locid,cnt;
-describe formatted loc_orc_2d.state partition(zip=94086, year='2001');
+describe formatted loc_orc_2d partition(zip=94086, year='2001') state;
-describe formatted loc_orc_2d.state partition(zip=94087, year='2002');
+describe formatted loc_orc_2d partition(zip=94087, year='2002') state;
-describe formatted loc_orc_2d.locid partition(zip=94086, year='2001');
+describe formatted loc_orc_2d partition(zip=94086, year='2001') locid;
-describe formatted loc_orc_2d.locid partition(zip=94087, year='2002');
+describe formatted loc_orc_2d partition(zip=94087, year='2002') locid;
-describe formatted loc_orc_2d.cnt partition(zip=94086, year='2001');
+describe formatted loc_orc_2d partition(zip=94086, year='2001') cnt;
-describe formatted loc_orc_2d.cnt partition(zip=94087, year='2002');
+describe formatted loc_orc_2d partition(zip=94087, year='2002') cnt;
explain extended select state,locid,cnt,zip from loc_orc_2d;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientpositive/partition_coltype_literals.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/partition_coltype_literals.q b/ql/src/test/queries/clientpositive/partition_coltype_literals.q
index bfd208d..b918dd3 100644
--- a/ql/src/test/queries/clientpositive/partition_coltype_literals.q
+++ b/ql/src/test/queries/clientpositive/partition_coltype_literals.q
@@ -32,8 +32,8 @@ analyze table partcoltypenum partition (tint=110Y, sint=22000S, bint=33000000000
describe extended partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L);
analyze table partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L) compute statistics for columns;
-describe formatted partcoltypenum.key partition (tint=110Y, sint=22000S, bint=330000000000L);
-describe formatted partcoltypenum.value partition (tint=110Y, sint=22000S, bint=330000000000L);
+describe formatted partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L) key;
+describe formatted partcoltypenum partition (tint=110Y, sint=22000S, bint=330000000000L) value;
-- change table column type for partition
alter table partcoltypenum change key key decimal(10,0);
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/queries/clientpositive/stats_only_null.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/stats_only_null.q b/ql/src/test/queries/clientpositive/stats_only_null.q
index caeb43e..29ff5f8 100644
--- a/ql/src/test/queries/clientpositive/stats_only_null.q
+++ b/ql/src/test/queries/clientpositive/stats_only_null.q
@@ -44,7 +44,7 @@ CREATE TABLE stats_null_part(a double, b int, c STRING, d smallint) partitioned
insert into table stats_null_part partition(dt) select a,b,c,d,b from temps_null ;
analyze table stats_null_part compute statistics for columns;
-describe formatted stats_null_part.a partition(dt = 1);
+describe formatted stats_null_part partition(dt = 1) a;
reset hive.exec.dynamic.partition.mode;
drop table stats_null;
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/results/clientnegative/desc_failure3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/desc_failure3.q.out b/ql/src/test/results/clientnegative/desc_failure3.q.out
index 443857b..cf93ad3 100644
--- a/ql/src/test/results/clientnegative/desc_failure3.q.out
+++ b/ql/src/test/results/clientnegative/desc_failure3.q.out
@@ -12,4 +12,4 @@ POSTHOOK: query: CREATE TABLE db1.t1(key1 INT, value1 STRING) PARTITIONED BY (ds
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:db1
POSTHOOK: Output: db1@t1
-FAILED: SemanticException [Error 10004]: Invalid table alias or column reference db1.t1.key1
+FAILED: ParseException line 4:15 missing EOF at '.' near 't1'
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/results/clientnegative/desc_failure4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/desc_failure4.q.out b/ql/src/test/results/clientnegative/desc_failure4.q.out
new file mode 100644
index 0000000..0c666c8
--- /dev/null
+++ b/ql/src/test/results/clientnegative/desc_failure4.q.out
@@ -0,0 +1,21 @@
+PREHOOK: query: CREATE DATABASE IF NOT EXISTS db1
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:db1
+POSTHOOK: query: CREATE DATABASE IF NOT EXISTS db1
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:db1
+PREHOOK: query: use db1
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:db1
+POSTHOOK: query: use db1
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:db1
+PREHOOK: query: CREATE TABLE IF NOT EXISTS name1 (col1 string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:db1
+PREHOOK: Output: db1@name1
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS name1 (col1 string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:db1
+POSTHOOK: Output: db1@name1
+FAILED: SemanticException [Error 10072]: Database does not exist: name1
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/results/clientnegative/describe_xpath1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/describe_xpath1.q.out b/ql/src/test/results/clientnegative/describe_xpath1.q.out
index e83e9d6..322e6e8 100644
--- a/ql/src/test/results/clientnegative/describe_xpath1.q.out
+++ b/ql/src/test/results/clientnegative/describe_xpath1.q.out
@@ -1,4 +1,4 @@
-PREHOOK: query: describe src_thrift.$elem$
+PREHOOK: query: describe src_thrift $elem$
PREHOOK: type: DESCTABLE
PREHOOK: Input: default@src_thrift
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. cannot find field $elem$ from [private int org.apache.hadoop.hive.serde2.thrift.test.Complex.aint, private java.lang.String org.apache.hadoop.hive.serde2.thrift.test.Complex.aString, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lint, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lString, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lintString, private java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.mStringString, private java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.attributes, private org.apache.hadoop.hive.serde2.thrift.test.PropValueUnion org.apache.hadoop.hive.serde2.thrift.test.Complex.unionField1, private org.apache.hadoop.hive.serde2.thrift.test.PropValueUnion org.apache.hadoop.hive.serde2.thrift.test.Complex.unionField2, private org.apache.hadoop.hive.serde2.thrift.test.PropValueUnion org.apache.hadoop.hive.serde2.thrift.test.Complex.unionField3]
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/results/clientnegative/describe_xpath2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/describe_xpath2.q.out b/ql/src/test/results/clientnegative/describe_xpath2.q.out
index 3c3f263..c1f2ec1 100644
--- a/ql/src/test/results/clientnegative/describe_xpath2.q.out
+++ b/ql/src/test/results/clientnegative/describe_xpath2.q.out
@@ -1,4 +1,4 @@
-PREHOOK: query: describe src_thrift.$key$
+PREHOOK: query: describe src_thrift $key$
PREHOOK: type: DESCTABLE
PREHOOK: Input: default@src_thrift
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. cannot find field $key$ from [private int org.apache.hadoop.hive.serde2.thrift.test.Complex.aint, private java.lang.String org.apache.hadoop.hive.serde2.thrift.test.Complex.aString, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lint, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lString, private java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lintString, private java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.mStringString, private java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.attributes, private org.apache.hadoop.hive.serde2.thrift.test.PropValueUnion org.apache.hadoop.hive.serde2.thrift.test.Complex.unionField1, private org.apache.hadoop.hive.serde2.thrift.test.PropValueUnion org.apache.hadoop.hive.serde2.thrift.test.Complex.unionField2, private org.apache.hadoop.hive.serde2.thrift.test.PropValu
eUnion org.apache.hadoop.hive.serde2.thrift.test.Complex.unionField3]
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/results/clientnegative/describe_xpath3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/describe_xpath3.q.out b/ql/src/test/results/clientnegative/describe_xpath3.q.out
index fb8c799..a300633 100644
--- a/ql/src/test/results/clientnegative/describe_xpath3.q.out
+++ b/ql/src/test/results/clientnegative/describe_xpath3.q.out
@@ -1,4 +1,4 @@
-PREHOOK: query: describe src_thrift.lint.abc
+PREHOOK: query: describe src_thrift lint.abc
PREHOOK: type: DESCTABLE
PREHOOK: Input: default@src_thrift
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error in getting fields from serde.Unknown type for abc
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/results/clientnegative/describe_xpath4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/describe_xpath4.q.out b/ql/src/test/results/clientnegative/describe_xpath4.q.out
index 664a4b7..b569eca 100644
--- a/ql/src/test/results/clientnegative/describe_xpath4.q.out
+++ b/ql/src/test/results/clientnegative/describe_xpath4.q.out
@@ -1,4 +1,4 @@
-PREHOOK: query: describe src_thrift.mStringString.abc
+PREHOOK: query: describe src_thrift mStringString.abc
PREHOOK: type: DESCTABLE
PREHOOK: Input: default@src_thrift
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Error in getting fields from serde.Unknown type for abc
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/results/clientnegative/drop_database_cascade.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/drop_database_cascade.q.out b/ql/src/test/results/clientnegative/drop_database_cascade.q.out
index 304b967..170a017 100644
--- a/ql/src/test/results/clientnegative/drop_database_cascade.q.out
+++ b/ql/src/test/results/clientnegative/drop_database_cascade.q.out
@@ -82,4 +82,4 @@ PREHOOK: type: DESCFUNCTION
POSTHOOK: query: describe function TEST_database.test_func
POSTHOOK: type: DESCFUNCTION
Function 'TEST_database.test_func' does not exist.
-FAILED: SemanticException [Error 10001]: Table not found TEST_database.test_table
+FAILED: SemanticException [Error 10072]: Database does not exist: TEST_database
http://git-wip-us.apache.org/repos/asf/hive/blob/e9ca6870/ql/src/test/results/clientpositive/alter_partition_update_status.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/alter_partition_update_status.q.out b/ql/src/test/results/clientpositive/alter_partition_update_status.q.out
index 49d5268..922822e 100644
--- a/ql/src/test/results/clientpositive/alter_partition_update_status.q.out
+++ b/ql/src/test/results/clientpositive/alter_partition_update_status.q.out
@@ -28,10 +28,10 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_stat_part_one
POSTHOOK: Input: default@src_stat_part_one@partitionid=1
#### A masked pattern was here ####
-PREHOOK: query: describe formatted src_stat_part_one.key PARTITION(partitionId=1)
+PREHOOK: query: describe formatted src_stat_part_one PARTITION(partitionId=1) key
PREHOOK: type: DESCTABLE
PREHOOK: Input: default@src_stat_part_one
-POSTHOOK: query: describe formatted src_stat_part_one.key PARTITION(partitionId=1)
+POSTHOOK: query: describe formatted src_stat_part_one PARTITION(partitionId=1) key
POSTHOOK: type: DESCTABLE
POSTHOOK: Input: default@src_stat_part_one
# col_name data_type min max num_nulls distinct_count avg_col_len max_col_len num_trues num_falses comment
@@ -41,10 +41,10 @@ PREHOOK: query: ALTER TABLE src_stat_part_one PARTITION(partitionId=1) UPDATE ST
PREHOOK: type: ALTERTABLE_UPDATEPARTSTATS
POSTHOOK: query: ALTER TABLE src_stat_part_one PARTITION(partitionId=1) UPDATE STATISTICS for column key SET ('numDVs'='11','avgColLen'='2.2')
POSTHOOK: type: ALTERTABLE_UPDATEPARTSTATS
-PREHOOK: query: describe formatted src_stat_part_one.key PARTITION(partitionId=1)
+PREHOOK: query: describe formatted src_stat_part_one PARTITION(partitionId=1) key
PREHOOK: type: DESCTABLE
PREHOOK: Input: default@src_stat_part_one
-POSTHOOK: query: describe formatted src_stat_part_one.key PARTITION(partitionId=1)
+POSTHOOK: query: describe formatted src_stat_part_one PARTITION(partitionId=1) key
POSTHOOK: type: DESCTABLE
POSTHOOK: Input: default@src_stat_part_one
# col_name data_type min max num_nulls distinct_count avg_col_len max_col_len num_trues num_falses comment
@@ -80,10 +80,10 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_stat_part_two
POSTHOOK: Input: default@src_stat_part_two@px=1/py=a
#### A masked pattern was here ####
-PREHOOK: query: describe formatted src_stat_part_two.key PARTITION(px=1, py='a')
+PREHOOK: query: describe formatted src_stat_part_two PARTITION(px=1, py='a') key
PREHOOK: type: DESCTABLE
PREHOOK: Input: default@src_stat_part_two
-POSTHOOK: query: describe formatted src_stat_part_two.key PARTITION(px=1, py='a')
+POSTHOOK: query: describe formatted src_stat_part_two PARTITION(px=1, py='a') key
POSTHOOK: type: DESCTABLE
POSTHOOK: Input: default@src_stat_part_two
# col_name data_type min max num_nulls distinct_count avg_col_len max_col_len num_trues num_falses comment
@@ -93,10 +93,10 @@ PREHOOK: query: ALTER TABLE src_stat_part_two PARTITION(px=1, py='a') UPDATE STA
PREHOOK: type: ALTERTABLE_UPDATEPARTSTATS
POSTHOOK: query: ALTER TABLE src_stat_part_two PARTITION(px=1, py='a') UPDATE STATISTICS for column key SET ('numDVs'='30','maxColLen'='40')
POSTHOOK: type: ALTERTABLE_UPDATEPARTSTATS
-PREHOOK: query: describe formatted src_stat_part_two.key PARTITION(px=1, py='a')
+PREHOOK: query: describe formatted src_stat_part_two PARTITION(px=1, py='a') key
PREHOOK: type: DESCTABLE
PREHOOK: Input: default@src_stat_part_two
-POSTHOOK: query: describe formatted src_stat_part_two.key PARTITION(px=1, py='a')
+POSTHOOK: query: describe formatted src_stat_part_two PARTITION(px=1, py='a') key
POSTHOOK: type: DESCTABLE
POSTHOOK: Input: default@src_stat_part_two
# col_name data_type min max num_nulls distinct_count avg_col_len max_col_len num_trues num_falses comment
@@ -118,10 +118,10 @@ PREHOOK: query: ALTER TABLE default.src_stat_part_two PARTITION(px=1, py='a') UP
PREHOOK: type: ALTERTABLE_UPDATEPARTSTATS
POSTHOOK: query: ALTER TABLE default.src_stat_part_two PARTITION(px=1, py='a') UPDATE STATISTICS for column key SET ('numDVs'='40','maxColLen'='50')
POSTHOOK: type: ALTERTABLE_UPDATEPARTSTATS
-PREHOOK: query: describe formatted default.src_stat_part_two key PARTITION(px=1, py='a')
+PREHOOK: query: describe formatted default.src_stat_part_two PARTITION(px=1, py='a') key
PREHOOK: type: DESCTABLE
PREHOOK: Input: default@src_stat_part_two
-POSTHOOK: query: describe formatted default.src_stat_part_two key PARTITION(px=1, py='a')
+POSTHOOK: query: describe formatted default.src_stat_part_two PARTITION(px=1, py='a') key
POSTHOOK: type: DESCTABLE
POSTHOOK: Input: default@src_stat_part_two
# col_name data_type min max num_nulls distinct_count avg_col_len max_col_len num_trues num_falses comment