Posted to commits@hive.apache.org by cw...@apache.org on 2012/11/07 05:55:04 UTC

svn commit: r1406465 [15/15] - in /hive/trunk: common/src/java/org/apache/hadoop/hive/conf/ conf/ data/files/ metastore/if/ metastore/src/gen/thrift/gen-cpp/ metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ metastore/src/gen/...

Added: hive/trunk/ql/src/test/results/clientnegative/columnstats_partlvl_dp.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/columnstats_partlvl_dp.q.out?rev=1406465&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/columnstats_partlvl_dp.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/columnstats_partlvl_dp.q.out Wed Nov  7 04:55:00 2012
@@ -0,0 +1,54 @@
+PREHOOK: query: DROP TABLE Employee_Part
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE Employee_Part
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
+row format delimited fields terminated by '|'  stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
+row format delimited fields terminated by '|'  stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@Employee_Part
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=USA
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=UK
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=USA
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=4000.0/country=USA
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=3500.0/country=UK
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=UK
+FAILED: SemanticException [Error 30008]: Dynamic partitioning is not supported yet while gathering column statistics through ANALYZE statement
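
Note: the .q source for this negative test is not part of this hunk, so the exact statement is an assumption, but Error 30008 is what Hive raises when the ANALYZE partition spec leaves a key unbound, roughly:

    analyze table Employee_Part partition (employeeSalary='2000.0', country)
    compute statistics for columns employeeID;

Leaving country without a value makes the spec dynamic, and the column statistics gatherer rejects dynamic partition specs.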

Added: hive/trunk/ql/src/test/results/clientnegative/columnstats_partlvl_incorrect_num_keys.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/columnstats_partlvl_incorrect_num_keys.q.out?rev=1406465&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/columnstats_partlvl_incorrect_num_keys.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/columnstats_partlvl_incorrect_num_keys.q.out Wed Nov  7 04:55:00 2012
@@ -0,0 +1,54 @@
+PREHOOK: query: DROP TABLE Employee_Part
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE Employee_Part
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
+row format delimited fields terminated by '|'  stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
+row format delimited fields terminated by '|'  stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@Employee_Part
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=USA
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=UK
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=USA
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=4000.0/country=USA
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=3500.0/country=UK
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=UK
+FAILED: SemanticException [Error 30006]: Incorrect number of partitioning key specified in ANALYZE statement
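
Note: again the .q file is outside this hunk; a plausible trigger for Error 30006 is a partition spec naming fewer keys than the table declares, e.g.:

    analyze table Employee_Part partition (employeeSalary='2000.0')
    compute statistics for columns employeeID;

The table is partitioned by (employeeSalary, country), so a one-key spec carries an incorrect number of partitioning keys.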

Added: hive/trunk/ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.out?rev=1406465&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.out Wed Nov  7 04:55:00 2012
@@ -0,0 +1,54 @@
+PREHOOK: query: DROP TABLE Employee_Part
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE Employee_Part
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
+row format delimited fields terminated by '|'  stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
+row format delimited fields terminated by '|'  stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@Employee_Part
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=USA
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=UK
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=USA
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=4000.0/country=USA
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=3500.0/country=UK
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=UK
+FAILED: SemanticException [Error 30007]: Invalid partitioning key/value specified in ANALYZE statement
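
Note: a likely trigger for Error 30007, assuming the usual shape of these negative tests, is a fully specified partition spec whose key or value does not match the table, e.g.:

    analyze table Employee_Part partition (employeeSalary='6000.0', country='UK')
    compute statistics for columns employeeID;

No such partition was loaded above, so the key/value pair fails validation.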

Added: hive/trunk/ql/src/test/results/clientnegative/columnstats_partlvl_multiple_part_clause.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/columnstats_partlvl_multiple_part_clause.q.out?rev=1406465&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/columnstats_partlvl_multiple_part_clause.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/columnstats_partlvl_multiple_part_clause.q.out Wed Nov  7 04:55:00 2012
@@ -0,0 +1,55 @@
+PREHOOK: query: DROP TABLE Employee_Part
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE Employee_Part
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
+row format delimited fields terminated by '|'  stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
+row format delimited fields terminated by '|'  stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@Employee_Part
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=USA
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=UK
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=USA
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=4000.0/country=USA
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=3500.0/country=UK
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=UK
+FAILED: ParseException line 5:79 mismatched input 'partition' expecting KW_COMPUTE near ')' in analyze statement
+
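Note: the ParseException pins the problem at the token after the first partition clause's closing ')': the grammar expects COMPUTE there, so a statement with two partition clauses cannot parse. A sketch of the presumed input (the .q source is not in this hunk):

    analyze table Employee_Part
    partition (employeeSalary='2000.0', country='USA')
    partition (employeeSalary='3000.0', country='UK')
    compute statistics for columns employeeID;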

Added: hive/trunk/ql/src/test/results/clientnegative/columnstats_tbllvl.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/columnstats_tbllvl.q.out?rev=1406465&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/columnstats_tbllvl.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/columnstats_tbllvl.q.out Wed Nov  7 04:55:00 2012
@@ -0,0 +1,36 @@
+PREHOOK: query: DROP TABLE IF EXISTS UserVisits_web_text_none
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS UserVisits_web_text_none
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE UserVisits_web_text_none (
+  sourceIP string,
+  destURL string,
+  visitDate string,
+  adRevenue float,
+  userAgent string,
+  cCode string,
+  lCode string,
+  sKeyword string,
+  avgTimeOnSite int)
+row format delimited fields terminated by '|'  stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE UserVisits_web_text_none (
+  sourceIP string,
+  destURL string,
+  visitDate string,
+  adRevenue float,
+  userAgent string,
+  cCode string,
+  lCode string,
+  sKeyword string,
+  avgTimeOnSite int)
+row format delimited fields terminated by '|'  stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@UserVisits_web_text_none
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
+PREHOOK: type: LOAD
+PREHOOK: Output: default@uservisits_web_text_none
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@uservisits_web_text_none
+FAILED: SemanticException [Error 10004]: Line 1:21 Invalid table alias or column reference 'destIP': (possible column names are: sourceip, desturl, visitdate, adrevenue, useragent, ccode, lcode, skeyword, avgtimeonsite)
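
Note: the Line 1:21 offset here appears to refer not to the user's text but to the internal select that column statistics gathering rewrites the ANALYZE statement into (a compute_stats query over the table). A sketch of the presumed failing statement, which is an assumption since the .q source is not in this hunk:

    analyze table UserVisits_web_text_none
    compute statistics for columns destIP;

destIP is not a column of the table (destURL is), which is why the error lists the valid names.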

Added: hive/trunk/ql/src/test/results/clientnegative/columnstats_tbllvl_complex_type.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/columnstats_tbllvl_complex_type.q.out?rev=1406465&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/columnstats_tbllvl_complex_type.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/columnstats_tbllvl_complex_type.q.out Wed Nov  7 04:55:00 2012
@@ -0,0 +1,26 @@
+PREHOOK: query: DROP TABLE IF EXISTS table_complex_type
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS table_complex_type
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE table_complex_type (
+       a STRING,
+       b ARRAY<STRING>,
+       c ARRAY<MAP<STRING,STRING>>,
+       d MAP<STRING,ARRAY<STRING>>
+       ) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE table_complex_type (
+       a STRING,
+       b ARRAY<STRING>,
+       c ARRAY<MAP<STRING,STRING>>,
+       d MAP<STRING,ARRAY<STRING>>
+       ) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@table_complex_type
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/create_nested_type.txt' OVERWRITE INTO TABLE table_complex_type
+PREHOOK: type: LOAD
+PREHOOK: Output: default@table_complex_type
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/create_nested_type.txt' OVERWRITE INTO TABLE table_complex_type
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@table_complex_type
+FAILED: UDFArgumentTypeException Only primitive type arguments are accepted but map<string,array<string>> is passed.
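
Note: judging from the DDL above, d is the only column of type map<string,array<string>>, so the presumed failing statement is something like:

    analyze table table_complex_type
    compute statistics for columns d;

compute_stats is implemented only for primitive types, hence the UDFArgumentTypeException.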

Added: hive/trunk/ql/src/test/results/clientnegative/columnstats_tbllvl_incorrect_column.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/columnstats_tbllvl_incorrect_column.q.out?rev=1406465&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/columnstats_tbllvl_incorrect_column.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/columnstats_tbllvl_incorrect_column.q.out Wed Nov  7 04:55:00 2012
@@ -0,0 +1,36 @@
+PREHOOK: query: DROP TABLE IF EXISTS UserVisits_web_text_none
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS UserVisits_web_text_none
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE UserVisits_web_text_none (
+  sourceIP string,
+  destURL string,
+  visitDate string,
+  adRevenue float,
+  userAgent string,
+  cCode string,
+  lCode string,
+  sKeyword string,
+  avgTimeOnSite int)
+row format delimited fields terminated by '|'  stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE UserVisits_web_text_none (
+  sourceIP string,
+  destURL string,
+  visitDate string,
+  adRevenue float,
+  userAgent string,
+  cCode string,
+  lCode string,
+  sKeyword string,
+  avgTimeOnSite int)
+row format delimited fields terminated by '|'  stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@UserVisits_web_text_none
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
+PREHOOK: type: LOAD
+PREHOOK: Output: default@uservisits_web_text_none
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@uservisits_web_text_none
+FAILED: SemanticException [Error 10004]: Line 1:21 Invalid table alias or column reference 'destIP': (possible column names are: sourceip, desturl, visitdate, adrevenue, useragent, ccode, lcode, skeyword, avgtimeonsite)

Added: hive/trunk/ql/src/test/results/clientpositive/columnstats_partlvl.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/columnstats_partlvl.q.out?rev=1406465&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/columnstats_partlvl.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/columnstats_partlvl.q.out Wed Nov  7 04:55:00 2012
@@ -0,0 +1,435 @@
+PREHOOK: query: DROP TABLE Employee_Part
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE Employee_Part
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double)
+row format delimited fields terminated by '|'  stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double)
+row format delimited fields terminated by '|'  stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@Employee_Part
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee.dat" INTO TABLE Employee_Part partition(employeeSalary=2000.0)
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee.dat" INTO TABLE Employee_Part partition(employeeSalary=2000.0)
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=2000.0
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee.dat" INTO TABLE Employee_Part partition(employeeSalary=4000.0)
+PREHOOK: type: LOAD
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/employee.dat" INTO TABLE Employee_Part partition(employeeSalary=4000.0)
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=4000.0
+PREHOOK: query: explain 
+analyze table Employee_Part partition (employeeSalary=2000.0) compute statistics for columns employeeID
+PREHOOK: type: QUERY
+POSTHOOK: query: explain 
+analyze table Employee_Part partition (employeeSalary=2000.0) compute statistics for columns employeeID
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_ANALYZE (TOK_TAB (TOK_TABNAME Employee_Part) (TOK_PARTSPEC (TOK_PARTVAL employeeSalary 2000.0))) (TOK_TABCOLNAME employeeID))
+
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+  Stage-1 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Map Reduce
+      Alias -> Map Operator Tree:
+        employee_part 
+          TableScan
+            alias: employee_part
+            Select Operator
+              expressions:
+                    expr: employeeid
+                    type: int
+              outputColumnNames: employeeid
+              Group By Operator
+                aggregations:
+                      expr: compute_stats(employeeid, 16)
+                bucketGroup: false
+                mode: hash
+                outputColumnNames: _col0
+                Reduce Output Operator
+                  sort order: 
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:string,numbitvectors:int>
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: compute_stats(VALUE._col0)
+          bucketGroup: false
+          mode: mergepartial
+          outputColumnNames: _col0
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint>
+            outputColumnNames: _col0
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-1
+    Column Stats Work
+      Column Stats Desc:
+          Columns: employeeID
+          Column Types: int
+          Partition: employeesalary=2000.0
+          Table: Employee_Part
+
+
+PREHOOK: query: explain extended
+analyze table Employee_Part partition (employeeSalary=2000.0) compute statistics for columns employeeID
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+analyze table Employee_Part partition (employeeSalary=2000.0) compute statistics for columns employeeID
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_ANALYZE (TOK_TAB (TOK_TABNAME Employee_Part) (TOK_PARTSPEC (TOK_PARTVAL employeeSalary 2000.0))) (TOK_TABCOLNAME employeeID))
+
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+  Stage-1 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Map Reduce
+      Alias -> Map Operator Tree:
+        employee_part 
+          TableScan
+            alias: employee_part
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: employeeid
+                    type: int
+              outputColumnNames: employeeid
+              Group By Operator
+                aggregations:
+                      expr: compute_stats(employeeid, 16)
+                bucketGroup: false
+                mode: hash
+                outputColumnNames: _col0
+                Reduce Output Operator
+                  sort order: 
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:string,numbitvectors:int>
+      Needs Tagging: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: employeesalary=2000.0
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              employeesalary 2000.0
+            properties:
+              bucket_count -1
+              columns employeeid,employeename
+              columns.types int:string
+              field.delim |
+#### A masked pattern was here ####
+              name default.employee_part
+              numFiles 1
+              numPartitions 2
+              numRows 0
+              partition_columns employeesalary
+              rawDataSize 0
+              serialization.ddl struct employee_part { i32 employeeid, string employeename}
+              serialization.format |
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 105
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns employeeid,employeename
+                columns.types int:string
+                field.delim |
+#### A masked pattern was here ####
+                name default.employee_part
+                numFiles 2
+                numPartitions 2
+                numRows 0
+                partition_columns employeesalary
+                rawDataSize 0
+                serialization.ddl struct employee_part { i32 employeeid, string employeename}
+                serialization.format |
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 210
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.employee_part
+            name: default.employee_part
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: compute_stats(VALUE._col0)
+          bucketGroup: false
+          mode: mergepartial
+          outputColumnNames: _col0
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint>
+            outputColumnNames: _col0
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+#### A masked pattern was here ####
+              NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    columns _col0
+                    columns.types struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint>
+                    escape.delim \
+                    serialization.format 1
+              TotalFiles: 1
+              GatherStats: false
+              MultiFileSpray: false
+      Truncated Path -> Alias:
+        /employee_part/employeesalary=2000.0 [employee_part]
+
+  Stage: Stage-1
+    Column Stats Work
+      Column Stats Desc:
+          Columns: employeeID
+          Column Types: int
+          Partition: employeesalary=2000.0
+          Table: Employee_Part
+          Is Table Level Stats: false
+
+
+PREHOOK: query: analyze table Employee_Part partition (employeeSalary=2000.0) compute statistics for columns employeeID
+PREHOOK: type: QUERY
+PREHOOK: Input: default@employee_part@employeesalary=2000.0
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table Employee_Part partition (employeeSalary=2000.0) compute statistics for columns employeeID
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@employee_part@employeesalary=2000.0
+#### A masked pattern was here ####
+PREHOOK: query: explain 
+analyze table Employee_Part partition (employeeSalary=4000.0) compute statistics for columns employeeID
+PREHOOK: type: QUERY
+POSTHOOK: query: explain 
+analyze table Employee_Part partition (employeeSalary=4000.0) compute statistics for columns employeeID
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_ANALYZE (TOK_TAB (TOK_TABNAME Employee_Part) (TOK_PARTSPEC (TOK_PARTVAL employeeSalary 4000.0))) (TOK_TABCOLNAME employeeID))
+
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+  Stage-1 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Map Reduce
+      Alias -> Map Operator Tree:
+        employee_part 
+          TableScan
+            alias: employee_part
+            Select Operator
+              expressions:
+                    expr: employeeid
+                    type: int
+              outputColumnNames: employeeid
+              Group By Operator
+                aggregations:
+                      expr: compute_stats(employeeid, 16)
+                bucketGroup: false
+                mode: hash
+                outputColumnNames: _col0
+                Reduce Output Operator
+                  sort order: 
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:string,numbitvectors:int>
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: compute_stats(VALUE._col0)
+          bucketGroup: false
+          mode: mergepartial
+          outputColumnNames: _col0
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint>
+            outputColumnNames: _col0
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-1
+    Column Stats Work
+      Column Stats Desc:
+          Columns: employeeID
+          Column Types: int
+          Partition: employeesalary=4000.0
+          Table: Employee_Part
+
+
+PREHOOK: query: explain extended
+analyze table Employee_Part partition (employeeSalary=4000.0) compute statistics for columns employeeID
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+analyze table Employee_Part partition (employeeSalary=4000.0) compute statistics for columns employeeID
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_ANALYZE (TOK_TAB (TOK_TABNAME Employee_Part) (TOK_PARTSPEC (TOK_PARTVAL employeeSalary 4000.0))) (TOK_TABCOLNAME employeeID))
+
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+  Stage-1 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Map Reduce
+      Alias -> Map Operator Tree:
+        employee_part 
+          TableScan
+            alias: employee_part
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: employeeid
+                    type: int
+              outputColumnNames: employeeid
+              Group By Operator
+                aggregations:
+                      expr: compute_stats(employeeid, 16)
+                bucketGroup: false
+                mode: hash
+                outputColumnNames: _col0
+                Reduce Output Operator
+                  sort order: 
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:string,numbitvectors:int>
+      Needs Tagging: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: employeesalary=4000.0
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              employeesalary 4000.0
+            properties:
+              bucket_count -1
+              columns employeeid,employeename
+              columns.types int:string
+              field.delim |
+#### A masked pattern was here ####
+              name default.employee_part
+              numFiles 1
+              numPartitions 2
+              numRows 0
+              partition_columns employeesalary
+              rawDataSize 0
+              serialization.ddl struct employee_part { i32 employeeid, string employeename}
+              serialization.format |
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 105
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns employeeid,employeename
+                columns.types int:string
+                field.delim |
+#### A masked pattern was here ####
+                name default.employee_part
+                numFiles 2
+                numPartitions 2
+                numRows 0
+                partition_columns employeesalary
+                rawDataSize 0
+                serialization.ddl struct employee_part { i32 employeeid, string employeename}
+                serialization.format |
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 210
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.employee_part
+            name: default.employee_part
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: compute_stats(VALUE._col0)
+          bucketGroup: false
+          mode: mergepartial
+          outputColumnNames: _col0
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint>
+            outputColumnNames: _col0
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+#### A masked pattern was here ####
+              NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    columns _col0
+                    columns.types struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint>
+                    escape.delim \
+                    serialization.format 1
+              TotalFiles: 1
+              GatherStats: false
+              MultiFileSpray: false
+      Truncated Path -> Alias:
+        /employee_part/employeesalary=4000.0 [employee_part]
+
+  Stage: Stage-1
+    Column Stats Work
+      Column Stats Desc:
+          Columns: employeeID
+          Column Types: int
+          Partition: employeesalary=4000.0
+          Table: Employee_Part
+          Is Table Level Stats: false
+
+
+PREHOOK: query: analyze table Employee_Part partition (employeeSalary=4000.0) compute statistics for columns employeeID
+PREHOOK: type: QUERY
+PREHOOK: Input: default@employee_part@employeesalary=4000.0
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table Employee_Part partition (employeeSalary=4000.0) compute statistics for columns employeeID
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@employee_part@employeesalary=4000.0
+#### A masked pattern was here ####
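
Note: as the EXPLAIN EXTENDED output shows, the ANALYZE is compiled into an ordinary map/reduce aggregation over compute_stats, followed by a Column Stats Work stage that persists the result to the metastore. A hand-written equivalent of the Stage-0 aggregation for one partition would be roughly:

    select compute_stats(employeeID, 16)
    from Employee_Part
    where employeeSalary = 2000.0;

The literal 16 is the number of bit vectors handed to the distinct-value estimator, matching the numbitvectors field in the intermediate struct type.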

Added: hive/trunk/ql/src/test/results/clientpositive/columnstats_tbllvl.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/columnstats_tbllvl.q.out?rev=1406465&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/columnstats_tbllvl.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/columnstats_tbllvl.q.out Wed Nov  7 04:55:00 2012
@@ -0,0 +1,266 @@
+PREHOOK: query: DROP TABLE IF EXISTS UserVisits_web_text_none
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS UserVisits_web_text_none
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE UserVisits_web_text_none (
+  sourceIP string,
+  destURL string,
+  visitDate string,
+  adRevenue float,
+  userAgent string,
+  cCode string,
+  lCode string,
+  sKeyword string,
+  avgTimeOnSite int)
+row format delimited fields terminated by '|'  stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE UserVisits_web_text_none (
+  sourceIP string,
+  destURL string,
+  visitDate string,
+  adRevenue float,
+  userAgent string,
+  cCode string,
+  lCode string,
+  sKeyword string,
+  avgTimeOnSite int)
+row format delimited fields terminated by '|'  stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@UserVisits_web_text_none
+PREHOOK: query: LOAD DATA LOCAL INPATH "../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
+PREHOOK: type: LOAD
+PREHOOK: Output: default@uservisits_web_text_none
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@uservisits_web_text_none
+PREHOOK: query: explain 
+analyze table UserVisits_web_text_none compute statistics for columns sourceIP, avgTimeOnSite, adRevenue
+PREHOOK: type: QUERY
+POSTHOOK: query: explain 
+analyze table UserVisits_web_text_none compute statistics for columns sourceIP, avgTimeOnSite, adRevenue
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_ANALYZE (TOK_TAB (TOK_TABNAME UserVisits_web_text_none)) (TOK_TABCOLNAME sourceIP avgTimeOnSite adRevenue))
+
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+  Stage-1 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Map Reduce
+      Alias -> Map Operator Tree:
+        uservisits_web_text_none 
+          TableScan
+            alias: uservisits_web_text_none
+            Select Operator
+              expressions:
+                    expr: sourceip
+                    type: string
+                    expr: avgtimeonsite
+                    type: int
+                    expr: adrevenue
+                    type: float
+              outputColumnNames: sourceip, avgtimeonsite, adrevenue
+              Group By Operator
+                aggregations:
+                      expr: compute_stats(sourceip, 16)
+                      expr: compute_stats(avgtimeonsite, 16)
+                      expr: compute_stats(adrevenue, 16)
+                bucketGroup: false
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2
+                Reduce Output Operator
+                  sort order: 
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:string,numbitvectors:int>
+                        expr: _col1
+                        type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:string,numbitvectors:int>
+                        expr: _col2
+                        type: struct<columntype:string,min:double,max:double,countnulls:bigint,bitvector:string,numbitvectors:int>
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: compute_stats(VALUE._col0)
+                expr: compute_stats(VALUE._col1)
+                expr: compute_stats(VALUE._col2)
+          bucketGroup: false
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint>
+                  expr: _col1
+                  type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint>
+                  expr: _col2
+                  type: struct<columntype:string,min:double,max:double,countnulls:bigint,numdistinctvalues:bigint>
+            outputColumnNames: _col0, _col1, _col2
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-1
+    Column Stats Work
+      Column Stats Desc:
+          Columns: sourceIP, avgTimeOnSite, adRevenue
+          Column Types: string, int, float
+          Table: UserVisits_web_text_none
+
+
+PREHOOK: query: explain extended
+analyze table UserVisits_web_text_none compute statistics for columns sourceIP, avgTimeOnSite, adRevenue
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+analyze table UserVisits_web_text_none compute statistics for columns sourceIP, avgTimeOnSite, adRevenue
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_ANALYZE (TOK_TAB (TOK_TABNAME UserVisits_web_text_none)) (TOK_TABCOLNAME sourceIP avgTimeOnSite adRevenue))
+
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+  Stage-1 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Map Reduce
+      Alias -> Map Operator Tree:
+        uservisits_web_text_none 
+          TableScan
+            alias: uservisits_web_text_none
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: sourceip
+                    type: string
+                    expr: avgtimeonsite
+                    type: int
+                    expr: adrevenue
+                    type: float
+              outputColumnNames: sourceip, avgtimeonsite, adrevenue
+              Group By Operator
+                aggregations:
+                      expr: compute_stats(sourceip, 16)
+                      expr: compute_stats(avgtimeonsite, 16)
+                      expr: compute_stats(adrevenue, 16)
+                bucketGroup: false
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2
+                Reduce Output Operator
+                  sort order: 
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:string,numbitvectors:int>
+                        expr: _col1
+                        type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:string,numbitvectors:int>
+                        expr: _col2
+                        type: struct<columntype:string,min:double,max:double,countnulls:bigint,bitvector:string,numbitvectors:int>
+      Needs Tagging: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: uservisits_web_text_none
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              bucket_count -1
+              columns sourceip,desturl,visitdate,adrevenue,useragent,ccode,lcode,skeyword,avgtimeonsite
+              columns.types string:string:string:float:string:string:string:string:int
+              field.delim |
+#### A masked pattern was here ####
+              name default.uservisits_web_text_none
+              numFiles 1
+              numPartitions 0
+              numRows 0
+              rawDataSize 0
+              serialization.ddl struct uservisits_web_text_none { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite}
+              serialization.format |
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 7060
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns sourceip,desturl,visitdate,adrevenue,useragent,ccode,lcode,skeyword,avgtimeonsite
+                columns.types string:string:string:float:string:string:string:string:int
+                field.delim |
+#### A masked pattern was here ####
+                name default.uservisits_web_text_none
+                numFiles 1
+                numPartitions 0
+                numRows 0
+                rawDataSize 0
+                serialization.ddl struct uservisits_web_text_none { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite}
+                serialization.format |
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 7060
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.uservisits_web_text_none
+            name: default.uservisits_web_text_none
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: compute_stats(VALUE._col0)
+                expr: compute_stats(VALUE._col1)
+                expr: compute_stats(VALUE._col2)
+          bucketGroup: false
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint>
+                  expr: _col1
+                  type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint>
+                  expr: _col2
+                  type: struct<columntype:string,min:double,max:double,countnulls:bigint,numdistinctvalues:bigint>
+            outputColumnNames: _col0, _col1, _col2
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+#### A masked pattern was here ####
+              NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    columns _col0,_col1,_col2
+                    columns.types struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint>:struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint>:struct<columntype:string,min:double,max:double,countnulls:bigint,numdistinctvalues:bigint>
+                    escape.delim \
+                    serialization.format 1
+              TotalFiles: 1
+              GatherStats: false
+              MultiFileSpray: false
+      Truncated Path -> Alias:
+        /uservisits_web_text_none [uservisits_web_text_none]
+
+  Stage: Stage-1
+    Column Stats Work
+      Column Stats Desc:
+          Columns: sourceIP, avgTimeOnSite, adRevenue
+          Column Types: string, int, float
+          Table: UserVisits_web_text_none
+          Is Table Level Stats: true
+
+
+PREHOOK: query: analyze table UserVisits_web_text_none compute statistics for columns sourceIP, avgTimeOnSite, adRevenue
+PREHOOK: type: QUERY
+PREHOOK: Input: default@uservisits_web_text_none
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table UserVisits_web_text_none compute statistics for columns sourceIP, avgTimeOnSite, adRevenue
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@uservisits_web_text_none
+#### A masked pattern was here ####
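
Note: the table-level case follows the same pattern, one compute_stats aggregate per requested column and "Is Table Level Stats: true" in the Column Stats Desc. The hand-written equivalent of Stage-0 here would be approximately:

    select compute_stats(sourceIP, 16),
           compute_stats(avgTimeOnSite, 16),
           compute_stats(adRevenue, 16)
    from UserVisits_web_text_none;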

Added: hive/trunk/ql/src/test/results/clientpositive/compute_stats_binary.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/compute_stats_binary.q.out?rev=1406465&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/compute_stats_binary.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/compute_stats_binary.q.out Wed Nov  7 04:55:00 2012
@@ -0,0 +1,33 @@
+PREHOOK: query: create table tab_binary(a binary)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tab_binary(a binary)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tab_binary
+PREHOOK: query: -- insert some data
+LOAD DATA LOCAL INPATH "../data/files/binary.txt" INTO TABLE tab_binary
+PREHOOK: type: LOAD
+PREHOOK: Output: default@tab_binary
+POSTHOOK: query: -- insert some data
+LOAD DATA LOCAL INPATH "../data/files/binary.txt" INTO TABLE tab_binary
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@tab_binary
+PREHOOK: query: select count(*) from tab_binary
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tab_binary
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from tab_binary
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tab_binary
+#### A masked pattern was here ####
+10
+PREHOOK: query: -- compute statistical summary of data
+select compute_stats(a, 16) from tab_binary
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tab_binary
+#### A masked pattern was here ####
+POSTHOOK: query: -- compute statistical summary of data
+select compute_stats(a, 16) from tab_binary
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tab_binary
+#### A masked pattern was here ####
+{"columntype":"Binary","maxlength":36,"avglength":20.0,"countnulls":0}

Added: hive/trunk/ql/src/test/results/clientpositive/compute_stats_boolean.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/compute_stats_boolean.q.out?rev=1406465&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/compute_stats_boolean.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/compute_stats_boolean.q.out Wed Nov  7 04:55:00 2012
@@ -0,0 +1,33 @@
+PREHOOK: query: create table tab_bool(a boolean)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tab_bool(a boolean)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tab_bool
+PREHOOK: query: -- insert some data
+LOAD DATA LOCAL INPATH "../data/files/bool.txt" INTO TABLE tab_bool
+PREHOOK: type: LOAD
+PREHOOK: Output: default@tab_bool
+POSTHOOK: query: -- insert some data
+LOAD DATA LOCAL INPATH "../data/files/bool.txt" INTO TABLE tab_bool
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@tab_bool
+PREHOOK: query: select count(*) from tab_bool
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tab_bool
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from tab_bool
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tab_bool
+#### A masked pattern was here ####
+33
+PREHOOK: query: -- compute statistical summary of data
+select compute_stats(a, 16) from tab_bool
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tab_bool
+#### A masked pattern was here ####
+POSTHOOK: query: -- compute statistical summary of data
+select compute_stats(a, 16) from tab_bool
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tab_bool
+#### A masked pattern was here ####
+{"columntype":"Boolean","counttrues":13,"countfalses":19,"countnulls":1}

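A quick consistency check on the Boolean summary: counttrues + countfalses + countnulls = 13 + 19 + 1 = 33, which matches the count(*) result above, so every row lands in exactly one bucket.
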
Added: hive/trunk/ql/src/test/results/clientpositive/compute_stats_double.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/compute_stats_double.q.out?rev=1406465&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/compute_stats_double.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/compute_stats_double.q.out Wed Nov  7 04:55:00 2012
@@ -0,0 +1,33 @@
+PREHOOK: query: create table tab_double(a double)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tab_double(a double)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tab_double
+PREHOOK: query: -- insert some data
+LOAD DATA LOCAL INPATH "../data/files/double.txt" INTO TABLE tab_double
+PREHOOK: type: LOAD
+PREHOOK: Output: default@tab_double
+POSTHOOK: query: -- insert some data
+LOAD DATA LOCAL INPATH "../data/files/double.txt" INTO TABLE tab_double
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@tab_double
+PREHOOK: query: select count(*) from tab_double
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tab_double
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from tab_double
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tab_double
+#### A masked pattern was here ####
+16
+PREHOOK: query: -- compute statistical summary of data
+select compute_stats(a, 16) from tab_double
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tab_double
+#### A masked pattern was here ####
+POSTHOOK: query: -- compute statistical summary of data
+select compute_stats(a, 16) from tab_double
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tab_double
+#### A masked pattern was here ####
+{"columntype":"Double","min":-87.2,"max":435.33,"countnulls":2,"numdistinctvalues":8}

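In the Double summary, min, max, and countnulls are exact aggregates, while numdistinctvalues appears to come from an estimator parameterized by compute_stats' second argument (16 in these tests); a sketch of one such estimator follows the Long output below.
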
Added: hive/trunk/ql/src/test/results/clientpositive/compute_stats_long.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/compute_stats_long.q.out?rev=1406465&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/compute_stats_long.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/compute_stats_long.q.out Wed Nov  7 04:55:00 2012
@@ -0,0 +1,33 @@
+PREHOOK: query: create table tab_int(a int)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tab_int(a int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tab_int
+PREHOOK: query: -- insert some data
+LOAD DATA LOCAL INPATH "../data/files/int.txt" INTO TABLE tab_int
+PREHOOK: type: LOAD
+PREHOOK: Output: default@tab_int
+POSTHOOK: query: -- insert some data
+LOAD DATA LOCAL INPATH "../data/files/int.txt" INTO TABLE tab_int
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@tab_int
+PREHOOK: query: select count(*) from tab_int
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tab_int
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from tab_int
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tab_int
+#### A masked pattern was here ####
+12
+PREHOOK: query: -- compute statistical summary of data
+select compute_stats(a, 16) from tab_int
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tab_int
+#### A masked pattern was here ####
+POSTHOOK: query: -- compute statistical summary of data
+select compute_stats(a, 16) from tab_int
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tab_int
+#### A masked pattern was here ####
+{"columntype":"Long","min":0,"max":344,"countnulls":1,"numdistinctvalues":16}

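Note that the Long summary reports numdistinctvalues of 16 from at most 11 non-null rows (12 rows, one null), so the figure is an estimate rather than an exact count. Assuming the second argument of compute_stats sets the number of bit vectors for a Flajolet-Martin style estimator (an assumption about the mechanism; this diff does not confirm it), a minimal sketch of such an estimator:

    import java.util.Random;

    // Illustrative Flajolet-Martin sketch; not Hive's estimator code.
    public class FmSketch {
        private static final double PHI = 0.77351;  // FM correction constant
        private final int[] bitmaps;                // one bitmap per hash function
        private final int[] seedA, seedB;           // per-bitmap hash parameters

        public FmSketch(int numVectors, long seed) {
            bitmaps = new int[numVectors];
            seedA = new int[numVectors];
            seedB = new int[numVectors];
            Random r = new Random(seed);
            for (int i = 0; i < numVectors; i++) {
                seedA[i] = r.nextInt() | 1;  // odd multiplier
                seedB[i] = r.nextInt();
            }
        }

        public void add(long value) {
            for (int i = 0; i < bitmaps.length; i++) {
                int h = (int) (seedA[i] * value + seedB[i]);
                // Position of the hash's lowest set bit, capped so 1 << lsb is safe.
                int lsb = Integer.numberOfTrailingZeros(h == 0 ? 1 << 31 : h);
                bitmaps[i] |= 1 << Math.min(lsb, 30);
            }
        }

        public long estimate() {
            double sumR = 0;
            for (int bm : bitmaps) {
                // Index of the lowest unset bit in this bitmap.
                sumR += Integer.numberOfTrailingZeros(~bm);
            }
            return (long) (Math.pow(2.0, sumR / bitmaps.length) / PHI);
        }

        public static void main(String[] args) {
            FmSketch s = new FmSketch(16, 42L);
            for (long v : new long[] {0, 12, 45, 3, 12, 344, 45, 0}) {
                s.add(v);
            }
            System.out.println("estimated distinct: " + s.estimate());
        }
    }

Averaging the lowest-unset-bit index across bitmaps and scaling 2^R by 1/0.77351 yields the estimate; on small inputs it can over- or undershoot the true count, which is consistent with the 16-for-11 figure above.
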
Added: hive/trunk/ql/src/test/results/clientpositive/compute_stats_string.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/compute_stats_string.q.out?rev=1406465&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/compute_stats_string.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/compute_stats_string.q.out Wed Nov  7 04:55:00 2012
@@ -0,0 +1,33 @@
+PREHOOK: query: create table tab_string(a string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tab_string(a string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tab_string
+PREHOOK: query: -- insert some data
+LOAD DATA LOCAL INPATH "../data/files/string.txt" INTO TABLE tab_string
+PREHOOK: type: LOAD
+PREHOOK: Output: default@tab_string
+POSTHOOK: query: -- insert some data
+LOAD DATA LOCAL INPATH "../data/files/string.txt" INTO TABLE tab_string
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@tab_string
+PREHOOK: query: select count(*) from tab_string
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tab_string
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from tab_string
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tab_string
+#### A masked pattern was here ####
+10
+PREHOOK: query: -- compute statistical summary of data
+select compute_stats(a, 16) from tab_string
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tab_string
+#### A masked pattern was here ####
+POSTHOOK: query: -- compute statistical summary of data
+select compute_stats(a, 16) from tab_string
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tab_string
+#### A masked pattern was here ####
+{"columntype":"String","maxlength":11,"avglength":3.9,"countnulls":0,"numdistinctvalues":5}

Modified: hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out?rev=1406465&r1=1406464&r2=1406465&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out Wed Nov  7 04:55:00 2012
@@ -36,6 +36,7 @@ ceil
 ceiling
 coalesce
 collect_set
+compute_stats
 concat
 concat_ws
 context_ngrams
@@ -182,6 +183,7 @@ ceil
 ceiling
 coalesce
 collect_set
+compute_stats
 concat
 concat_ws
 context_ngrams

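The show_functions change makes the registration visible: compute_stats joins the sorted function list in two hunks, apparently because the expected output enumerates the registry more than once.
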
Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java?rev=1406465&r1=1406464&r2=1406465&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java Wed Nov  7 04:55:00 2012
@@ -665,7 +665,7 @@ public final class PrimitiveObjectInspec
   /**
    * Get the String value out of a primitive object. Note that
    * NullPointerException will be thrown if o is null. Note that
-   * NumberFormatException will be thrown if o is not a valid number.
+   * RuntimeException will be thrown if o is not a valid string.
    */
   public static String getString(Object o, PrimitiveObjectInspector oi) {
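
The javadoc fix above is the whole change shown for this file: getString converts a primitive object to a String rather than parsing one, so the documented failure mode becomes a generic RuntimeException instead of NumberFormatException. A purely illustrative sketch of that shape follows; the method body is elided from this diff, and the category names and dispatch below are hypothetical stand-ins:

    public class GetStringSketch {
        static String getString(Object o, String primitiveCategory) {
            if (o == null) {
                throw new NullPointerException("o is null");  // per the javadoc
            }
            switch (primitiveCategory) {
            case "STRING":
                return (String) o;
            case "BOOLEAN":
            case "INT":
            case "DOUBLE":
                return String.valueOf(o);  // formatting, never parsing
            default:
                // No number parsing happens above, so an unsupported category
                // surfaces as a plain RuntimeException, matching the new javadoc.
                throw new RuntimeException(
                    "Cannot convert a " + primitiveCategory + " to string");
            }
        }

        public static void main(String[] args) {
            System.out.println(getString(Boolean.TRUE, "BOOLEAN"));  // prints "true"
        }
    }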