Posted to commits@hive.apache.org by na...@apache.org on 2010/08/27 07:41:45 UTC

svn commit: r990026 [9/10] - in /hadoop/hive/trunk: ./ eclipse-templates/ metastore/if/ metastore/src/gen-cpp/ metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ metastore/src/gen-php/ metastore/src/gen-py/hive_metastore/ metastore/src/ja...

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter2.q?rev=990026&r1=990025&r2=990026&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter2.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter2.q Fri Aug 27 05:41:43 2010
@@ -18,3 +18,38 @@ show partitions alter2;
 alter table alter2 add partition (insertdate='2008-01-02') location '2008/01/02';
 describe extended alter2;
 show partitions alter2;
+
+-- Cleanup
+DROP TABLE alter2;
+SHOW TABLES;
+
+-- Using non-default Database
+
+CREATE DATABASE alter2_db;
+USE alter2_db;
+SHOW TABLES;
+
+CREATE TABLE alter2(a int, b int) PARTITIONED BY (insertdate string);
+DESCRIBE EXTENDED alter2;
+SHOW PARTITIONS alter2;
+ALTER TABLE alter2 ADD PARTITION (insertdate='2008-01-01') LOCATION '2008/01/01';
+DESCRIBE EXTENDED alter2;
+SHOW PARTITIONS alter2;
+ALTER TABLE alter2 ADD PARTITION (insertdate='2008-01-02') LOCATION '2008/01/02';
+DESCRIBE EXTENDED alter2;
+SHOW PARTITIONS alter2;
+DROP TABLE alter2;
+
+CREATE EXTERNAL TABLE alter2(a int, b int) PARTITIONED BY (insertdate string);
+DESCRIBE EXTENDED alter2;
+SHOW PARTITIONS alter2;
+ALTER TABLE alter2 ADD PARTITION (insertdate='2008-01-01') LOCATION '2008/01/01';
+DESCRIBE EXTENDED alter2;
+SHOW PARTITIONS alter2;
+ALTER TABLE alter2 ADD PARTITION (insertdate='2008-01-02') LOCATION '2008/01/02';
+DESCRIBE EXTENDED alter2;
+SHOW PARTITIONS alter2;
+
+DROP TABLE alter2;
+USE default;
+DROP DATABASE alter2_db;

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter3.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter3.q?rev=990026&r1=990025&r2=990026&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter3.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter3.q Fri Aug 27 05:41:43 2010
@@ -19,3 +19,37 @@ select col1 from alter3_src;
 alter table alter3_like rename to alter3_like_renamed;
 
 describe extended alter3_like_renamed;
+
+-- Cleanup
+DROP TABLE alter3_src;
+DROP TABLE alter3_renamed;
+DROP TABLE alter3_like_renamed;
+SHOW TABLES;
+
+-- With non-default Database
+
+CREATE DATABASE alter3_db;
+USE alter3_db;
+SHOW TABLES;
+
+CREATE TABLE alter3_src (col1 STRING) STORED AS TEXTFILE ;
+LOAD DATA LOCAL INPATH '../data/files/test.dat' OVERWRITE INTO TABLE alter3_src ;
+
+CREATE TABLE alter3 (col1 STRING) PARTITIONED BY (pcol1 STRING, pcol2 STRING) STORED AS SEQUENCEFILE;
+
+CREATE TABLE alter3_like LIKE alter3;
+
+INSERT OVERWRITE TABLE alter3 PARTITION (pCol1='test_part', pcol2='test_part') SELECT col1 FROM alter3_src ;
+SELECT * FROM alter3 WHERE pcol1='test_part' AND pcol2='test_part';
+
+ALTER TABLE alter3 RENAME TO alter3_renamed;
+DESCRIBE EXTENDED alter3_renamed;
+DESCRIBE EXTENDED alter3_renamed PARTITION (pCol1='test_part', pcol2='test_part');
+SELECT * FROM alter3_renamed WHERE pcol1='test_part' AND pcol2='test_part';
+
+INSERT OVERWRITE TABLE alter3_like
+PARTITION (pCol1='test_part', pcol2='test_part')
+SELECT col1 FROM alter3_src;
+ALTER TABLE alter3_like RENAME TO alter3_like_renamed;
+
+DESCRIBE EXTENDED alter3_like_renamed;

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter4.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter4.q?rev=990026&r1=990025&r2=990026&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter4.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter4.q Fri Aug 27 05:41:43 2010
@@ -3,3 +3,24 @@ DESCRIBE EXTENDED set_bucketing_test;
 
 ALTER TABLE set_bucketing_test NOT CLUSTERED;
 DESCRIBE EXTENDED set_bucketing_test;
+
+-- Cleanup
+DROP TABLE set_bucketing_test;
+SHOW TABLES;
+
+-- with non-default Database
+
+CREATE DATABASE alter4_db;
+USE alter4_db;
+SHOW TABLES;
+
+CREATE TABLE set_bucketing_test (key INT, value STRING) CLUSTERED BY (key) INTO 10 BUCKETS;
+DESCRIBE EXTENDED set_bucketing_test;
+
+ALTER TABLE set_bucketing_test NOT CLUSTERED;
+DESCRIBE EXTENDED set_bucketing_test;
+
+DROP TABLE set_bucketing_test;
+USE default;
+DROP DATABASE alter4_db;
+SHOW DATABASES;

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/database.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/database.q?rev=990026&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/database.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/database.q Fri Aug 27 05:41:43 2010
@@ -0,0 +1,91 @@
+SHOW DATABASES;
+
+-- CREATE with comment
+CREATE DATABASE test_db COMMENT 'Hive test database';
+SHOW DATABASES;
+
+-- CREATE INE already exists
+CREATE DATABASE IF NOT EXISTS test_db;
+SHOW DATABASES;
+
+-- SHOW DATABASES synonym
+SHOW SCHEMAS;
+
+-- DROP
+DROP DATABASE test_db;
+SHOW DATABASES;
+
+-- CREATE INE doesn't exist
+CREATE DATABASE IF NOT EXISTS test_db COMMENT 'Hive test database';
+SHOW DATABASES;
+
+-- DROP IE exists
+DROP DATABASE IF EXISTS test_db;
+SHOW DATABASES;
+
+-- DROP IE doesn't exist
+DROP DATABASE IF EXISTS test_db;
+
+-- SHOW
+CREATE DATABASE test_db;
+SHOW DATABASES;
+
+-- SHOW pattern
+SHOW DATABASES LIKE 'test*';
+
+-- SHOW pattern
+SHOW DATABASES LIKE '*ef*';
+
+
+USE test_db;
+SHOW DATABASES;
+
+-- CREATE table in non-default DB
+CREATE TABLE test_table (col1 STRING) STORED AS TEXTFILE;
+SHOW TABLES;
+
+-- DESCRIBE table in non-default DB
+DESCRIBE test_table;
+
+-- DESCRIBE EXTENDED in non-default DB
+DESCRIBE EXTENDED test_table;
+
+-- CREATE LIKE in non-default DB
+CREATE TABLE test_table_like LIKE test_table;
+SHOW TABLES;
+DESCRIBE EXTENDED test_table_like;
+
+-- LOAD and SELECT
+LOAD DATA LOCAL INPATH '../data/files/test.dat' OVERWRITE INTO TABLE test_table ;
+SELECT * FROM test_table;
+
+-- DROP and CREATE w/o LOAD
+DROP TABLE test_table;
+SHOW TABLES;
+
+CREATE TABLE test_table (col1 STRING) STORED AS TEXTFILE;
+SHOW TABLES;
+
+SELECT * FROM test_table;
+
+-- CREATE table that already exists in DEFAULT
+USE test_db;
+CREATE TABLE src (col1 STRING) STORED AS TEXTFILE;
+SHOW TABLES;
+
+SELECT * FROM src LIMIT 10;
+
+USE default;
+SELECT * FROM src LIMIT 10;
+
+-- DROP DATABASE
+USE test_db;
+
+DROP TABLE src;
+DROP TABLE test_table;
+DROP TABLE test_table_like;
+SHOW TABLES;
+
+USE default;
+DROP DATABASE test_db;
+SHOW DATABASES;
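
The new database.q test above walks Hive's database DDL through a full lifecycle. A condensed sketch of that lifecycle, using the hypothetical names demo_db and demo_tbl (any names not already used by the tests would do), looks like this in a Hive CLI session:

CREATE DATABASE IF NOT EXISTS demo_db COMMENT 'illustrative only';
USE demo_db;

-- Tables created from this point belong to demo_db rather than to default.
CREATE TABLE demo_tbl (col1 STRING) STORED AS TEXTFILE;
SHOW TABLES;

-- SHOW DATABASES accepts the same wildcard patterns the test exercises.
SHOW DATABASES LIKE 'demo*';

-- A database must be empty before it can be dropped
-- (see the database_drop_not_empty negative test below).
DROP TABLE demo_tbl;
USE default;
DROP DATABASE IF EXISTS demo_db;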

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/rename_column.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/rename_column.q?rev=990026&r1=990025&r2=990026&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/rename_column.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/rename_column.q Fri Aug 27 05:41:43 2010
@@ -22,5 +22,36 @@ DESCRIBE kv_rename_test;
 ALTER TABLE kv_rename_test CHANGE COLUMN a2 a INT AFTER b;
 DESCRIBE kv_rename_test;
 
+DROP TABLE kv_rename_test;
+SHOW TABLES;
 
+-- Using non-default Database
+CREATE DATABASE kv_rename_test_db;
+USE kv_rename_test_db;
 
+CREATE TABLE kv_rename_test(a int, b int, c int);
+DESCRIBE kv_rename_test;
+
+ALTER TABLE kv_rename_test CHANGE a a STRING;
+DESCRIBE kv_rename_test;
+
+ALTER TABLE kv_rename_test CHANGE a a1 INT;
+DESCRIBE kv_rename_test;
+
+ALTER TABLE kv_rename_test CHANGE a1 a2 INT FIRST;
+DESCRIBE kv_rename_test;
+
+ALTER TABLE kv_rename_test CHANGE a2 a INT AFTER b;
+DESCRIBE kv_rename_test;
+
+ALTER TABLE kv_rename_test CHANGE a a1 INT COMMENT 'test comment1';
+DESCRIBE kv_rename_test;
+
+ALTER TABLE kv_rename_test CHANGE a1 a2 INT COMMENT 'test comment2' FIRST;
+DESCRIBE kv_rename_test;
+
+ALTER TABLE kv_rename_test CHANGE COLUMN a2 a INT AFTER b;
+DESCRIBE kv_rename_test;
+
+DROP TABLE kv_rename_test;
+SHOW TABLES;

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/database_already_exists.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/database_already_exists.q.out?rev=990026&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/database_already_exists.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/database_already_exists.q.out Fri Aug 27 05:41:43 2010
@@ -0,0 +1,15 @@
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+default
+PREHOOK: query: -- Try to create a database that already exists
+CREATE DATABASE test_db
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: -- Try to create a database that already exists
+CREATE DATABASE test_db
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: CREATE DATABASE test_db
+PREHOOK: type: CREATEDATABASE
+Failed with exception Database test_db already exists
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/database_create_already_exists.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/database_create_already_exists.q.out?rev=990026&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/database_create_already_exists.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/database_create_already_exists.q.out Fri Aug 27 05:41:43 2010
@@ -0,0 +1,15 @@
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+default
+PREHOOK: query: -- Try to create a database that already exists
+CREATE DATABASE test_db
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: -- Try to create a database that already exists
+CREATE DATABASE test_db
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: CREATE DATABASE test_db
+PREHOOK: type: CREATEDATABASE
+Failed with exception Database test_db already exists
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/database_create_invalid_name.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/database_create_invalid_name.q.out?rev=990026&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/database_create_invalid_name.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/database_create_invalid_name.q.out Fri Aug 27 05:41:43 2010
@@ -0,0 +1,10 @@
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+default
+PREHOOK: query: -- Try to create a database with an invalid name
+CREATE DATABASE `test.db`
+PREHOOK: type: CREATEDATABASE
+FAILED: Error in metadata: InvalidObjectException(message:test.db is not a valid database name)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/database_drop_does_not_exist.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/database_drop_does_not_exist.q.out?rev=990026&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/database_drop_does_not_exist.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/database_drop_does_not_exist.q.out Fri Aug 27 05:41:43 2010
@@ -0,0 +1,10 @@
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+default
+PREHOOK: query: -- Try to drop a database that does not exist
+DROP DATABASE does_not_exist
+PREHOOK: type: DROPDATABASE
+Failed with exception There is no database named does_not_exist
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/database_drop_not_empty.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/database_drop_not_empty.q.out?rev=990026&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/database_drop_not_empty.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/database_drop_not_empty.q.out Fri Aug 27 05:41:43 2010
@@ -0,0 +1,28 @@
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+default
+PREHOOK: query: -- Try to drop a non-empty database
+CREATE DATABASE test_db
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: -- Try to drop a non-empty database
+CREATE DATABASE test_db
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: USE test_db
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: USE test_db
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: CREATE TABLE t(a INT)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE t(a INT)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: test_db@t
+PREHOOK: query: USE default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: USE default
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: DROP DATABASE test_db
+PREHOOK: type: DROPDATABASE
+FAILED: Error in metadata: InvalidOperationException(message:Database test_db is not empty)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/database_switch_does_not_exist.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/database_switch_does_not_exist.q.out?rev=990026&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/database_switch_does_not_exist.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/database_switch_does_not_exist.q.out Fri Aug 27 05:41:43 2010
@@ -0,0 +1,10 @@
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+default
+PREHOOK: query: -- Try to switch to a database that does not exist
+USE does_not_exist
+PREHOOK: type: SWITCHDATABASE
+FAILED: Error in metadata: ERROR: The database does_not_exist does not exist.
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
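
Taken together, the negative tests above pin down the main database failure modes: creating a database that already exists or has an invalid name, dropping one that does not exist or is not empty, and switching to one that does not exist. Scripts can guard against the "already exists" and "does not exist" cases with the IF NOT EXISTS / IF EXISTS forms that database.q also exercises; a minimal sketch, with the hypothetical name guard_db:

CREATE DATABASE IF NOT EXISTS guard_db;  -- no error even if guard_db is already there
DROP DATABASE IF EXISTS guard_db;        -- no error even if guard_db is missing

The "not empty" case has no such guard here: the positive tests drop every contained table before issuing DROP DATABASE.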

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/add_part_exist.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/add_part_exist.q.out?rev=990026&r1=990025&r2=990026&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/add_part_exist.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/add_part_exist.q.out Fri Aug 27 05:41:43 2010
@@ -63,3 +63,102 @@ POSTHOOK: type: SHOWPARTITIONS
 ds=2010-01-01
 ds=2010-01-02
 ds=2010-01-03
+PREHOOK: query: DROP TABLE add_part_test
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@add_part_test
+PREHOOK: Output: default@add_part_test
+POSTHOOK: query: DROP TABLE add_part_test
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@add_part_test
+POSTHOOK: Output: default@add_part_test
+PREHOOK: query: SHOW TABLES
+PREHOOK: type: SHOWTABLES
+POSTHOOK: query: SHOW TABLES
+POSTHOOK: type: SHOWTABLES
+src
+src1
+src_json
+src_sequencefile
+src_thrift
+srcbucket
+srcbucket2
+srcpart
+PREHOOK: query: -- Test ALTER TABLE ADD PARTITION in non-default Database
+CREATE DATABASE add_part_test_db
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: -- Test ALTER TABLE ADD PARTITION in non-default Database
+CREATE DATABASE add_part_test_db
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: USE add_part_test_db
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: USE add_part_test_db
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: SHOW TABLES
+PREHOOK: type: SHOWTABLES
+POSTHOOK: query: SHOW TABLES
+POSTHOOK: type: SHOWTABLES
+PREHOOK: query: CREATE TABLE add_part_test (key STRING, value STRING) PARTITIONED BY (ds STRING)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE add_part_test (key STRING, value STRING) PARTITIONED BY (ds STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: add_part_test_db@add_part_test
+PREHOOK: query: SHOW PARTITIONS add_part_test
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: SHOW PARTITIONS add_part_test
+POSTHOOK: type: SHOWPARTITIONS
+PREHOOK: query: ALTER TABLE add_part_test ADD PARTITION (ds='2010-01-01')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: add_part_test_db@add_part_test
+POSTHOOK: query: ALTER TABLE add_part_test ADD PARTITION (ds='2010-01-01')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: add_part_test_db@add_part_test
+POSTHOOK: Output: add_part_test_db@add_part_test@ds=2010-01-01
+PREHOOK: query: SHOW PARTITIONS add_part_test
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: SHOW PARTITIONS add_part_test
+POSTHOOK: type: SHOWPARTITIONS
+ds=2010-01-01
+PREHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: add_part_test_db@add_part_test
+PREHOOK: Output: add_part_test_db@add_part_test@ds=2010-01-01
+POSTHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: add_part_test_db@add_part_test
+POSTHOOK: Output: add_part_test_db@add_part_test@ds=2010-01-01
+PREHOOK: query: SHOW PARTITIONS add_part_test
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: SHOW PARTITIONS add_part_test
+POSTHOOK: type: SHOWPARTITIONS
+ds=2010-01-01
+PREHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-02')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: add_part_test_db@add_part_test
+POSTHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-02')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: add_part_test_db@add_part_test
+POSTHOOK: Output: add_part_test_db@add_part_test@ds=2010-01-02
+PREHOOK: query: SHOW PARTITIONS add_part_test
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: SHOW PARTITIONS add_part_test
+POSTHOOK: type: SHOWPARTITIONS
+ds=2010-01-01
+ds=2010-01-02
+PREHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01') PARTITION (ds='2010-01-02') PARTITION (ds='2010-01-03')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: add_part_test_db@add_part_test
+PREHOOK: Output: add_part_test_db@add_part_test@ds=2010-01-01
+PREHOOK: Output: add_part_test_db@add_part_test@ds=2010-01-02
+POSTHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01') PARTITION (ds='2010-01-02') PARTITION (ds='2010-01-03')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: add_part_test_db@add_part_test
+POSTHOOK: Output: add_part_test_db@add_part_test@ds=2010-01-01
+POSTHOOK: Output: add_part_test_db@add_part_test@ds=2010-01-02
+POSTHOOK: Output: add_part_test_db@add_part_test@ds=2010-01-03
+PREHOOK: query: SHOW PARTITIONS add_part_test
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: SHOW PARTITIONS add_part_test
+POSTHOOK: type: SHOWPARTITIONS
+ds=2010-01-01
+ds=2010-01-02
+ds=2010-01-03

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out?rev=990026&r1=990025&r2=990026&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out Fri Aug 27 05:41:43 2010
@@ -10,7 +10,7 @@ POSTHOOK: type: DESCTABLE
 a	int	
 b	int	
 	 	 
-Detailed Table Information	Table(tableName:alter1, dbName:default, owner:njain, createTime:1282026227, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1282026227}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:alter1, dbName:default, owner:carl, createTime:1282709490, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1282709490}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: alter table alter1 set tblproperties ('a'='1', 'c'='3')
 PREHOOK: type: ALTERTABLE_PROPERTIES
 PREHOOK: Input: default@alter1
@@ -26,7 +26,7 @@ POSTHOOK: type: DESCTABLE
 a	int	
 b	int	
 	 	 
-Detailed Table Information	Table(tableName:alter1, dbName:default, owner:njain, createTime:1282026227, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{last_modified_by=njain, c=3, last_modified_time=1282026227, a=1, transient_lastDdlTime=1282026227}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:alter1, dbName:default, owner:carl, createTime:1282709490, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{last_modified_by=carl, c=3, last_modified_time=1282709490, a=1, transient_lastDdlTime=1282709490}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: alter table alter1 set tblproperties ('a'='1', 'c'='4', 'd'='3')
 PREHOOK: type: ALTERTABLE_PROPERTIES
 PREHOOK: Input: default@alter1
@@ -42,7 +42,7 @@ POSTHOOK: type: DESCTABLE
 a	int	
 b	int	
 	 	 
-Detailed Table Information	Table(tableName:alter1, dbName:default, owner:njain, createTime:1282026227, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{d=3, last_modified_by=njain, c=4, last_modified_time=1282026227, a=1, transient_lastDdlTime=1282026227}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:alter1, dbName:default, owner:carl, createTime:1282709490, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{d=3, last_modified_by=carl, c=4, last_modified_time=1282709491, a=1, transient_lastDdlTime=1282709491}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: alter table alter1 set tblproperties ('EXTERNAL'='TRUE')
 PREHOOK: type: ALTERTABLE_PROPERTIES
 PREHOOK: Input: default@alter1
@@ -58,7 +58,7 @@ POSTHOOK: type: DESCTABLE
 a	int	
 b	int	
 	 	 
-Detailed Table Information	Table(tableName:alter1, dbName:default, owner:njain, createTime:1282026227, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=TRUE, d=3, last_modified_by=njain, c=4, last_modified_time=1282026228, a=1, transient_lastDdlTime=1282026228}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)	
+Detailed Table Information	Table(tableName:alter1, dbName:default, owner:carl, createTime:1282709490, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=TRUE, d=3, last_modified_by=carl, c=4, last_modified_time=1282709491, a=1, transient_lastDdlTime=1282709491}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)	
 PREHOOK: query: alter table alter1 set tblproperties ('EXTERNAL'='FALSE')
 PREHOOK: type: ALTERTABLE_PROPERTIES
 PREHOOK: Input: default@alter1
@@ -74,7 +74,7 @@ POSTHOOK: type: DESCTABLE
 a	int	
 b	int	
 	 	 
-Detailed Table Information	Table(tableName:alter1, dbName:default, owner:njain, createTime:1282026227, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=FALSE, d=3, last_modified_by=njain, c=4, last_modified_time=1282026228, a=1, transient_lastDdlTime=1282026228}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:alter1, dbName:default, owner:carl, createTime:1282709490, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=FALSE, d=3, last_modified_by=carl, c=4, last_modified_time=1282709491, a=1, transient_lastDdlTime=1282709491}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: alter table alter1 set serdeproperties('s1'='9')
 PREHOOK: type: ALTERTABLE_SERDEPROPERTIES
 PREHOOK: Input: default@alter1
@@ -90,7 +90,7 @@ POSTHOOK: type: DESCTABLE
 a	int	
 b	int	
 	 	 
-Detailed Table Information	Table(tableName:alter1, dbName:default, owner:njain, createTime:1282026227, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{s1=9, serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=FALSE, d=3, last_modified_by=njain, c=4, last_modified_time=1282026228, a=1, transient_lastDdlTime=1282026228}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:alter1, dbName:default, owner:carl, createTime:1282709490, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{s1=9, serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=FALSE, d=3, last_modified_by=carl, c=4, last_modified_time=1282709491, a=1, transient_lastDdlTime=1282709491}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: alter table alter1 set serdeproperties('s1'='10', 's2' ='20')
 PREHOOK: type: ALTERTABLE_SERDEPROPERTIES
 PREHOOK: Input: default@alter1
@@ -106,7 +106,7 @@ POSTHOOK: type: DESCTABLE
 a	int	
 b	int	
 	 	 
-Detailed Table Information	Table(tableName:alter1, dbName:default, owner:njain, createTime:1282026227, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{s2=20, s1=10, serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=FALSE, d=3, last_modified_by=njain, c=4, last_modified_time=1282026228, a=1, transient_lastDdlTime=1282026228}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:alter1, dbName:default, owner:carl, createTime:1282709490, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{s2=20, s1=10, serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=FALSE, d=3, last_modified_by=carl, c=4, last_modified_time=1282709491, a=1, transient_lastDdlTime=1282709491}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: alter table alter1 set serde 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties('s1'='9')
 PREHOOK: type: ALTERTABLE_SERIALIZER
 PREHOOK: Input: default@alter1
@@ -122,7 +122,7 @@ POSTHOOK: type: DESCTABLE
 a	string	from deserializer
 b	string	from deserializer
 	 	 
-Detailed Table Information	Table(tableName:alter1, dbName:default, owner:njain, createTime:1282026227, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:from deserializer), FieldSchema(name:b, type:int, comment:from deserializer)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.TestSerDe, parameters:{s2=20, s1=9, serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=FALSE, d=3, last_modified_by=njain, c=4, last_modified_time=1282026228, a=1, transient_lastDdlTime=1282026228}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:alter1, dbName:default, owner:carl, createTime:1282709490, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:from deserializer), FieldSchema(name:b, type:int, comment:from deserializer)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.TestSerDe, parameters:{s2=20, s1=9, serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=FALSE, d=3, last_modified_by=carl, c=4, last_modified_time=1282709492, a=1, transient_lastDdlTime=1282709492}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: alter table alter1 set serde 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe'
 PREHOOK: type: ALTERTABLE_SERIALIZER
 PREHOOK: Input: default@alter1
@@ -138,7 +138,7 @@ POSTHOOK: type: DESCTABLE
 a	string	from deserializer
 b	string	from deserializer
 	 	 
-Detailed Table Information	Table(tableName:alter1, dbName:default, owner:njain, createTime:1282026227, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:from deserializer), FieldSchema(name:b, type:string, comment:from deserializer)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{s2=20, s1=9, serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=FALSE, d=3, last_modified_by=njain, c=4, last_modified_time=1282026229, a=1, transient_lastDdlTime=1282026229}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:alter1, dbName:default, owner:carl, createTime:1282709490, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:from deserializer), FieldSchema(name:b, type:string, comment:from deserializer)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{s2=20, s1=9, serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=FALSE, d=3, last_modified_by=carl, c=4, last_modified_time=1282709492, a=1, transient_lastDdlTime=1282709492}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: alter table alter1 replace columns (a int, b int, c string)
 PREHOOK: type: ALTERTABLE_REPLACECOLS
 PREHOOK: Input: default@alter1
@@ -154,3 +154,213 @@ POSTHOOK: type: DESCTABLE
 a	int	
 b	int	
 c	string	
+PREHOOK: query: -- Cleanup
+DROP TABLE alter1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@alter1
+PREHOOK: Output: default@alter1
+POSTHOOK: query: -- Cleanup
+DROP TABLE alter1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@alter1
+POSTHOOK: Output: default@alter1
+PREHOOK: query: SHOW TABLES
+PREHOOK: type: SHOWTABLES
+POSTHOOK: query: SHOW TABLES
+POSTHOOK: type: SHOWTABLES
+src
+src1
+src_json
+src_sequencefile
+src_thrift
+srcbucket
+srcbucket2
+srcpart
+PREHOOK: query: -- With non-default Database
+
+CREATE DATABASE alter1_db
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: -- With non-default Database
+
+CREATE DATABASE alter1_db
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: USE alter1_db
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: USE alter1_db
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: SHOW TABLES
+PREHOOK: type: SHOWTABLES
+POSTHOOK: query: SHOW TABLES
+POSTHOOK: type: SHOWTABLES
+PREHOOK: query: CREATE TABLE alter1(a INT, b INT)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE alter1(a INT, b INT)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: alter1_db@alter1
+PREHOOK: query: DESCRIBE EXTENDED alter1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED alter1
+POSTHOOK: type: DESCTABLE
+a	int	
+b	int	
+	 	 
+Detailed Table Information	Table(tableName:alter1, dbName:alter1_db, owner:carl, createTime:1282709493, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter1_db.db/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1282709493}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+PREHOOK: query: ALTER TABLE alter1 SET TBLPROPERTIES ('a'='1', 'c'='3')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: alter1_db@alter1
+PREHOOK: Output: alter1_db@alter1
+POSTHOOK: query: ALTER TABLE alter1 SET TBLPROPERTIES ('a'='1', 'c'='3')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: alter1_db@alter1
+POSTHOOK: Output: alter1_db@alter1
+PREHOOK: query: DESCRIBE EXTENDED alter1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED alter1
+POSTHOOK: type: DESCTABLE
+a	int	
+b	int	
+	 	 
+Detailed Table Information	Table(tableName:alter1, dbName:alter1_db, owner:carl, createTime:1282709493, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter1_db.db/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{last_modified_by=carl, c=3, last_modified_time=1282709493, a=1, transient_lastDdlTime=1282709493}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+PREHOOK: query: ALTER TABLE alter1 SET TBLPROPERTIES ('a'='1', 'c'='4', 'd'='3')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: alter1_db@alter1
+PREHOOK: Output: alter1_db@alter1
+POSTHOOK: query: ALTER TABLE alter1 SET TBLPROPERTIES ('a'='1', 'c'='4', 'd'='3')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: alter1_db@alter1
+POSTHOOK: Output: alter1_db@alter1
+PREHOOK: query: DESCRIBE EXTENDED alter1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED alter1
+POSTHOOK: type: DESCTABLE
+a	int	
+b	int	
+	 	 
+Detailed Table Information	Table(tableName:alter1, dbName:alter1_db, owner:carl, createTime:1282709493, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter1_db.db/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{d=3, last_modified_by=carl, c=4, last_modified_time=1282709493, a=1, transient_lastDdlTime=1282709493}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+PREHOOK: query: ALTER TABLE alter1 SET TBLPROPERTIES ('EXTERNAL'='TRUE')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: alter1_db@alter1
+PREHOOK: Output: alter1_db@alter1
+POSTHOOK: query: ALTER TABLE alter1 SET TBLPROPERTIES ('EXTERNAL'='TRUE')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: alter1_db@alter1
+POSTHOOK: Output: alter1_db@alter1
+PREHOOK: query: DESCRIBE EXTENDED alter1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED alter1
+POSTHOOK: type: DESCTABLE
+a	int	
+b	int	
+	 	 
+Detailed Table Information	Table(tableName:alter1, dbName:alter1_db, owner:carl, createTime:1282709493, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter1_db.db/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=TRUE, d=3, last_modified_by=carl, c=4, last_modified_time=1282709494, a=1, transient_lastDdlTime=1282709494}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)	
+PREHOOK: query: ALTER TABLE alter1 SET TBLPROPERTIES ('EXTERNAL'='FALSE')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: alter1_db@alter1
+PREHOOK: Output: alter1_db@alter1
+POSTHOOK: query: ALTER TABLE alter1 SET TBLPROPERTIES ('EXTERNAL'='FALSE')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: alter1_db@alter1
+POSTHOOK: Output: alter1_db@alter1
+PREHOOK: query: DESCRIBE EXTENDED alter1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED alter1
+POSTHOOK: type: DESCTABLE
+a	int	
+b	int	
+	 	 
+Detailed Table Information	Table(tableName:alter1, dbName:alter1_db, owner:carl, createTime:1282709493, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter1_db.db/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=FALSE, d=3, last_modified_by=carl, c=4, last_modified_time=1282709494, a=1, transient_lastDdlTime=1282709494}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+PREHOOK: query: ALTER TABLE alter1 SET SERDEPROPERTIES('s1'='9')
+PREHOOK: type: ALTERTABLE_SERDEPROPERTIES
+PREHOOK: Input: alter1_db@alter1
+PREHOOK: Output: alter1_db@alter1
+POSTHOOK: query: ALTER TABLE alter1 SET SERDEPROPERTIES('s1'='9')
+POSTHOOK: type: ALTERTABLE_SERDEPROPERTIES
+POSTHOOK: Input: alter1_db@alter1
+POSTHOOK: Output: alter1_db@alter1
+PREHOOK: query: DESCRIBE EXTENDED alter1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED alter1
+POSTHOOK: type: DESCTABLE
+a	int	
+b	int	
+	 	 
+Detailed Table Information	Table(tableName:alter1, dbName:alter1_db, owner:carl, createTime:1282709493, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter1_db.db/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{s1=9, serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=FALSE, d=3, last_modified_by=carl, c=4, last_modified_time=1282709494, a=1, transient_lastDdlTime=1282709494}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+PREHOOK: query: ALTER TABLE alter1 SET SERDEPROPERTIES('s1'='10', 's2' ='20')
+PREHOOK: type: ALTERTABLE_SERDEPROPERTIES
+PREHOOK: Input: alter1_db@alter1
+PREHOOK: Output: alter1_db@alter1
+POSTHOOK: query: ALTER TABLE alter1 SET SERDEPROPERTIES('s1'='10', 's2' ='20')
+POSTHOOK: type: ALTERTABLE_SERDEPROPERTIES
+POSTHOOK: Input: alter1_db@alter1
+POSTHOOK: Output: alter1_db@alter1
+PREHOOK: query: DESCRIBE EXTENDED alter1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED alter1
+POSTHOOK: type: DESCTABLE
+a	int	
+b	int	
+	 	 
+Detailed Table Information	Table(tableName:alter1, dbName:alter1_db, owner:carl, createTime:1282709493, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter1_db.db/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{s2=20, s1=10, serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=FALSE, d=3, last_modified_by=carl, c=4, last_modified_time=1282709494, a=1, transient_lastDdlTime=1282709494}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+PREHOOK: query: ALTER TABLE alter1 SET SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' WITH SERDEPROPERTIES ('s1'='9')
+PREHOOK: type: ALTERTABLE_SERIALIZER
+PREHOOK: Input: alter1_db@alter1
+PREHOOK: Output: alter1_db@alter1
+POSTHOOK: query: ALTER TABLE alter1 SET SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' WITH SERDEPROPERTIES ('s1'='9')
+POSTHOOK: type: ALTERTABLE_SERIALIZER
+POSTHOOK: Input: alter1_db@alter1
+POSTHOOK: Output: alter1_db@alter1
+PREHOOK: query: DESCRIBE EXTENDED alter1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED alter1
+POSTHOOK: type: DESCTABLE
+a	string	from deserializer
+b	string	from deserializer
+	 	 
+Detailed Table Information	Table(tableName:alter1, dbName:alter1_db, owner:carl, createTime:1282709493, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:from deserializer), FieldSchema(name:b, type:int, comment:from deserializer)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter1_db.db/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.TestSerDe, parameters:{s2=20, s1=9, serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=FALSE, d=3, last_modified_by=carl, c=4, last_modified_time=1282709494, a=1, transient_lastDdlTime=1282709494}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+PREHOOK: query: ALTER TABLE alter1 SET SERDE 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe'
+PREHOOK: type: ALTERTABLE_SERIALIZER
+PREHOOK: Input: alter1_db@alter1
+PREHOOK: Output: alter1_db@alter1
+POSTHOOK: query: ALTER TABLE alter1 SET SERDE 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe'
+POSTHOOK: type: ALTERTABLE_SERIALIZER
+POSTHOOK: Input: alter1_db@alter1
+POSTHOOK: Output: alter1_db@alter1
+PREHOOK: query: DESCRIBE EXTENDED alter1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED alter1
+POSTHOOK: type: DESCTABLE
+a	string	from deserializer
+b	string	from deserializer
+	 	 
+Detailed Table Information	Table(tableName:alter1, dbName:alter1_db, owner:carl, createTime:1282709493, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:from deserializer), FieldSchema(name:b, type:string, comment:from deserializer)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter1_db.db/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{s2=20, s1=9, serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{EXTERNAL=FALSE, d=3, last_modified_by=carl, c=4, last_modified_time=1282709494, a=1, transient_lastDdlTime=1282709494}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+PREHOOK: query: ALTER TABLE alter1 REPLACE COLUMNS (a int, b int, c string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: alter1_db@alter1
+PREHOOK: Output: alter1_db@alter1
+POSTHOOK: query: ALTER TABLE alter1 REPLACE COLUMNS (a int, b int, c string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: alter1_db@alter1
+POSTHOOK: Output: alter1_db@alter1
+PREHOOK: query: DESCRIBE alter1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE alter1
+POSTHOOK: type: DESCTABLE
+a	int	
+b	int	
+c	string	
+PREHOOK: query: DROP TABLE alter1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: alter1_db@alter1
+PREHOOK: Output: alter1_db@alter1
+POSTHOOK: query: DROP TABLE alter1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: alter1_db@alter1
+POSTHOOK: Output: alter1_db@alter1
+PREHOOK: query: USE default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: USE default
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: DROP DATABASE alter1_db
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: DROP DATABASE alter1_db
+POSTHOOK: type: DROPDATABASE
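
One detail visible in the alter1.q.out output above: once alter1 is created inside alter1_db, its location becomes .../build/ql/test/data/warehouse/alter1_db.db/alter1, i.e. a non-default database maps to a <dbname>.db subdirectory under the warehouse directory (the absolute prefix merely reflects the committer's local checkout). A minimal way to confirm this for a table, reusing the statements the test itself runs:

USE alter1_db;
DESCRIBE EXTENDED alter1;  -- the Detailed Table Information line includes location:...alter1_db.db/alter1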

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/alter2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/alter2.q.out?rev=990026&r1=990025&r2=990026&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/alter2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/alter2.q.out Fri Aug 27 05:41:43 2010
@@ -11,7 +11,7 @@ a	int	
 b	int	
 insertdate	string	
 	 	 
-Detailed Table Information	Table(tableName:alter2, dbName:default, owner:njain, createTime:1282026229, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{transient_lastDdlTime=1282026229}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:alter2, dbName:default, owner:carl, createTime:1282710009, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{transient_lastDdlTime=1282710009}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: show partitions alter2
 PREHOOK: type: SHOWPARTITIONS
 POSTHOOK: query: show partitions alter2
@@ -31,7 +31,7 @@ a	int	
 b	int	
 insertdate	string	
 	 	 
-Detailed Table Information	Table(tableName:alter2, dbName:default, owner:njain, createTime:1282026229, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{transient_lastDdlTime=1282026229}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:alter2, dbName:default, owner:carl, createTime:1282710009, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{transient_lastDdlTime=1282710009}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: show partitions alter2
 PREHOOK: type: SHOWPARTITIONS
 POSTHOOK: query: show partitions alter2
@@ -52,7 +52,7 @@ a	int	
 b	int	
 insertdate	string	
 	 	 
-Detailed Table Information	Table(tableName:alter2, dbName:default, owner:njain, createTime:1282026229, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{transient_lastDdlTime=1282026229}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:alter2, dbName:default, owner:carl, createTime:1282710009, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{transient_lastDdlTime=1282710009}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: show partitions alter2
 PREHOOK: type: SHOWPARTITIONS
 POSTHOOK: query: show partitions alter2
@@ -80,7 +80,7 @@ a	int	
 b	int	
 insertdate	string	
 	 	 
-Detailed Table Information	Table(tableName:alter2, dbName:default, owner:njain, createTime:1282026231, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1282026231}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)	
+Detailed Table Information	Table(tableName:alter2, dbName:default, owner:carl, createTime:1282710010, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1282710010}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)	
 PREHOOK: query: show partitions alter2
 PREHOOK: type: SHOWPARTITIONS
 POSTHOOK: query: show partitions alter2
@@ -100,7 +100,7 @@ a	int	
 b	int	
 insertdate	string	
 	 	 
-Detailed Table Information	Table(tableName:alter2, dbName:default, owner:njain, createTime:1282026231, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1282026231}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)	
+Detailed Table Information	Table(tableName:alter2, dbName:default, owner:carl, createTime:1282710010, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1282710010}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)	
 PREHOOK: query: show partitions alter2
 PREHOOK: type: SHOWPARTITIONS
 POSTHOOK: query: show partitions alter2
@@ -121,10 +121,194 @@ a	int	
 b	int	
 insertdate	string	
 	 	 
-Detailed Table Information	Table(tableName:alter2, dbName:default, owner:njain, createTime:1282026231, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1282026231}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)	
+Detailed Table Information	Table(tableName:alter2, dbName:default, owner:carl, createTime:1282710010, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1282710010}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)	
 PREHOOK: query: show partitions alter2
 PREHOOK: type: SHOWPARTITIONS
 POSTHOOK: query: show partitions alter2
 POSTHOOK: type: SHOWPARTITIONS
 insertdate=2008-01-01
 insertdate=2008-01-02
+PREHOOK: query: -- Cleanup
+DROP TABLE alter2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@alter2
+PREHOOK: Output: default@alter2
+POSTHOOK: query: -- Cleanup
+DROP TABLE alter2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@alter2
+POSTHOOK: Output: default@alter2
+PREHOOK: query: SHOW TABLES
+PREHOOK: type: SHOWTABLES
+POSTHOOK: query: SHOW TABLES
+POSTHOOK: type: SHOWTABLES
+src
+src1
+src_json
+src_sequencefile
+src_thrift
+srcbucket
+srcbucket2
+srcpart
+PREHOOK: query: -- Using non-default Database
+
+CREATE DATABASE alter2_db
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: -- Using non-default Database
+
+CREATE DATABASE alter2_db
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: USE alter2_db
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: USE alter2_db
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: SHOW TABLES
+PREHOOK: type: SHOWTABLES
+POSTHOOK: query: SHOW TABLES
+POSTHOOK: type: SHOWTABLES
+PREHOOK: query: CREATE TABLE alter2(a int, b int) PARTITIONED BY (insertdate string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE alter2(a int, b int) PARTITIONED BY (insertdate string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: alter2_db@alter2
+PREHOOK: query: DESCRIBE EXTENDED alter2
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED alter2
+POSTHOOK: type: DESCTABLE
+a	int	
+b	int	
+insertdate	string	
+	 	 
+Detailed Table Information	Table(tableName:alter2, dbName:alter2_db, owner:carl, createTime:1282710012, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter2_db.db/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{transient_lastDdlTime=1282710012}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+PREHOOK: query: SHOW PARTITIONS alter2
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: SHOW PARTITIONS alter2
+POSTHOOK: type: SHOWPARTITIONS
+PREHOOK: query: ALTER TABLE alter2 ADD PARTITION (insertdate='2008-01-01') LOCATION '2008/01/01'
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: alter2_db@alter2
+POSTHOOK: query: ALTER TABLE alter2 ADD PARTITION (insertdate='2008-01-01') LOCATION '2008/01/01'
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: alter2_db@alter2
+POSTHOOK: Output: alter2_db@alter2@insertdate=2008-01-01
+PREHOOK: query: DESCRIBE EXTENDED alter2
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED alter2
+POSTHOOK: type: DESCTABLE
+a	int	
+b	int	
+insertdate	string	
+	 	 
+Detailed Table Information	Table(tableName:alter2, dbName:alter2_db, owner:carl, createTime:1282710012, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter2_db.db/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{transient_lastDdlTime=1282710012}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+PREHOOK: query: SHOW PARTITIONS alter2
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: SHOW PARTITIONS alter2
+POSTHOOK: type: SHOWPARTITIONS
+insertdate=2008-01-01
+PREHOOK: query: ALTER TABLE alter2 ADD PARTITION (insertdate='2008-01-02') LOCATION '2008/01/02'
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: alter2_db@alter2
+POSTHOOK: query: ALTER TABLE alter2 ADD PARTITION (insertdate='2008-01-02') LOCATION '2008/01/02'
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: alter2_db@alter2
+POSTHOOK: Output: alter2_db@alter2@insertdate=2008-01-02
+PREHOOK: query: DESCRIBE EXTENDED alter2
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED alter2
+POSTHOOK: type: DESCTABLE
+a	int	
+b	int	
+insertdate	string	
+	 	 
+Detailed Table Information	Table(tableName:alter2, dbName:alter2_db, owner:carl, createTime:1282710012, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter2_db.db/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{transient_lastDdlTime=1282710012}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+PREHOOK: query: SHOW PARTITIONS alter2
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: SHOW PARTITIONS alter2
+POSTHOOK: type: SHOWPARTITIONS
+insertdate=2008-01-01
+insertdate=2008-01-02
+PREHOOK: query: DROP TABLE alter2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: alter2_db@alter2
+PREHOOK: Output: alter2_db@alter2
+POSTHOOK: query: DROP TABLE alter2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: alter2_db@alter2
+POSTHOOK: Output: alter2_db@alter2
+PREHOOK: query: CREATE EXTERNAL TABLE alter2(a int, b int) PARTITIONED BY (insertdate string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE EXTERNAL TABLE alter2(a int, b int) PARTITIONED BY (insertdate string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: alter2_db@alter2
+PREHOOK: query: DESCRIBE EXTENDED alter2
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED alter2
+POSTHOOK: type: DESCTABLE
+a	int	
+b	int	
+insertdate	string	
+	 	 
+Detailed Table Information	Table(tableName:alter2, dbName:alter2_db, owner:carl, createTime:1282710013, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter2_db.db/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1282710013}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)	
+PREHOOK: query: SHOW PARTITIONS alter2
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: SHOW PARTITIONS alter2
+POSTHOOK: type: SHOWPARTITIONS
+PREHOOK: query: ALTER TABLE alter2 ADD PARTITION (insertdate='2008-01-01') LOCATION '2008/01/01'
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: alter2_db@alter2
+POSTHOOK: query: ALTER TABLE alter2 ADD PARTITION (insertdate='2008-01-01') LOCATION '2008/01/01'
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: alter2_db@alter2
+POSTHOOK: Output: alter2_db@alter2@insertdate=2008-01-01
+PREHOOK: query: DESCRIBE EXTENDED alter2
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED alter2
+POSTHOOK: type: DESCTABLE
+a	int	
+b	int	
+insertdate	string	
+	 	 
+Detailed Table Information	Table(tableName:alter2, dbName:alter2_db, owner:carl, createTime:1282710013, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter2_db.db/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1282710013}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)	
+PREHOOK: query: SHOW PARTITIONS alter2
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: SHOW PARTITIONS alter2
+POSTHOOK: type: SHOWPARTITIONS
+insertdate=2008-01-01
+PREHOOK: query: ALTER TABLE alter2 ADD PARTITION (insertdate='2008-01-02') LOCATION '2008/01/02'
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: alter2_db@alter2
+POSTHOOK: query: ALTER TABLE alter2 ADD PARTITION (insertdate='2008-01-02') LOCATION '2008/01/02'
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: alter2_db@alter2
+POSTHOOK: Output: alter2_db@alter2@insertdate=2008-01-02
+PREHOOK: query: DESCRIBE EXTENDED alter2
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED alter2
+POSTHOOK: type: DESCTABLE
+a	int	
+b	int	
+insertdate	string	
+	 	 
+Detailed Table Information	Table(tableName:alter2, dbName:alter2_db, owner:carl, createTime:1282710013, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:pfile:/Users/carl/Projects/hive/build/ql/test/data/warehouse/alter2_db.db/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{EXTERNAL=TRUE, transient_lastDdlTime=1282710013}, viewOriginalText:null, viewExpandedText:null, tableType:EXTERNAL_TABLE)	
+PREHOOK: query: SHOW PARTITIONS alter2
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: SHOW PARTITIONS alter2
+POSTHOOK: type: SHOWPARTITIONS
+insertdate=2008-01-01
+insertdate=2008-01-02
+PREHOOK: query: DROP TABLE alter2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: alter2_db@alter2
+PREHOOK: Output: alter2_db@alter2
+POSTHOOK: query: DROP TABLE alter2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: alter2_db@alter2
+POSTHOOK: Output: alter2_db@alter2
+PREHOOK: query: USE default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: USE default
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: DROP DATABASE alter2_db
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: DROP DATABASE alter2_db
+POSTHOOK: type: DROPDATABASE