Posted to commits@hive.apache.org by na...@apache.org on 2010/09/20 22:22:25 UTC

svn commit: r999101 [1/9] - in /hadoop/hive/trunk: ./ contrib/src/test/results/clientpositive/ hbase-handler/src/test/results/ hwi/src/test/org/apache/hadoop/hive/hwi/ jdbc/src/test/org/apache/hadoop/hive/jdbc/ ql/src/java/org/apache/hadoop/hive/ql/exe...

Author: namit
Date: Mon Sep 20 20:22:22 2010
New Revision: 999101

URL: http://svn.apache.org/viewvc?rev=999101&view=rev
Log:
HIVE-558. Make describe output better
(Thiruvel Thirumoolan via namit)

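Before this change, DESCRIBE emitted bare "name<TAB>type<TAB>comment" rows, and DESCRIBE EXTENDED appended the raw Thrift Table/Partition toString() on a single line. The output is now a padded, sectioned layout; an excerpt from the updated fileformat_base64.q.out below:

    col_name            	data_type           	comment
    
    key                 	int                 	None
    value               	string              	None
    
    # Detailed Table Information
    Database:           	default
    Owner:              	thiruvel
    ...
    # Storage Information
    SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe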

Added:
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java
Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/contrib/src/test/results/clientpositive/fileformat_base64.q.out
    hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_s3.q.out
    hadoop/hive/trunk/hbase-handler/src/test/results/hbase_queries.q.out
    hadoop/hive/trunk/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java
    hadoop/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part_no_drop.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl_no_drop.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/alter2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/alter3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/alter4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/alter_partition_format_loc.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/bucket_groupby.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/combine3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_escape.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_insert_outputformat.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_like.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_nested_type.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_view.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/database.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ddltime.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/describe_xpath.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/fileformat_sequencefile.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/fileformat_text.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/index_creation.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input10.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input15.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part10.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join_thrift.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part1.q.out_0.17
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part10.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part11.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part12.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part13.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part14.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part14.q.out_0.17
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part9.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/protectmode.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rcfile_bigdata.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rcfile_columnar.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rcfile_default_format.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rename_column.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/tablename_with_select.q.out

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=999101&r1=999100&r2=999101&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Mon Sep 20 20:22:22 2010
@@ -138,6 +138,9 @@ Trunk -  Unreleased
     HIVE-1226. support filter pushdown against non-native tables
     (jvs via He Yongqiang)
 
+    HIVE-558. Make describe output better
+    (Thiruvel Thirumoolan via namit)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: hadoop/hive/trunk/contrib/src/test/results/clientpositive/fileformat_base64.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/results/clientpositive/fileformat_base64.q.out?rev=999101&r1=999100&r2=999101&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/results/clientpositive/fileformat_base64.q.out (original)
+++ hadoop/hive/trunk/contrib/src/test/results/clientpositive/fileformat_base64.q.out Mon Sep 20 20:22:22 2010
@@ -44,10 +44,34 @@ PREHOOK: query: DESCRIBE EXTENDED base64
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: DESCRIBE EXTENDED base64_test
 POSTHOOK: type: DESCTABLE
-key	int	
-value	string	
+col_name            	data_type           	comment             
+	 	 
+key                 	int                 	None                
+value               	string              	None                
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+Owner:              	thiruvel            	 
+CreateTime:         	Fri Sep 17 01:51:29 PDT 2010	 
+LastAccessTime:     	Wed Dec 31 16:00:00 PST 1969	 
+Protect Mode:       	None                	 
+Retention:          	0                   	 
+Location:           	pfile:/home/thiruvel/projects/hive/hive.unsecure/build/contrib/test/data/warehouse/base64_test	 
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	transient_lastDdlTime	1284713489          
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
 	 	 
-Detailed Table Information	Table(tableName:base64_test, dbName:default, owner:njain, createTime:1282150167, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null), FieldSchema(name:value, type:string, comment:null)], location:pfile:/data/users/njain/hive3/hive3/build/contrib/test/data/warehouse/base64_test, inputFormat:org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextInputFormat, outputFormat:org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1282150167}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: FROM src
 INSERT OVERWRITE TABLE base64_test
 SELECT key, value WHERE key < 10
@@ -65,11 +89,11 @@ POSTHOOK: Lineage: base64_test.value SIM
 PREHOOK: query: SELECT * FROM base64_test
 PREHOOK: type: QUERY
 PREHOOK: Input: default@base64_test
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-18_09-49-31_341_7110837578626291672/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-17_01-51-32_335_619401707200390008/-mr-10000
 POSTHOOK: query: SELECT * FROM base64_test
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@base64_test
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-18_09-49-31_341_7110837578626291672/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-17_01-51-32_335_619401707200390008/-mr-10000
 POSTHOOK: Lineage: base64_test.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: base64_test.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 0	val_0
@@ -107,11 +131,11 @@ POSTHOOK: Lineage: base64_test.value SIM
 PREHOOK: query: SELECT * FROM base64_test
 PREHOOK: type: QUERY
 PREHOOK: Input: default@base64_test
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-18_09-49-35_328_4985371350274176901/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-17_01-51-34_808_4140189104352013659/-mr-10000
 POSTHOOK: query: SELECT * FROM base64_test
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@base64_test
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-18_09-49-35_328_4985371350274176901/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-17_01-51-34_808_4140189104352013659/-mr-10000
 POSTHOOK: Lineage: base64_test.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: base64_test.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: base64_test.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]

Modified: hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_s3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_s3.q.out?rev=999101&r1=999100&r2=999101&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_s3.q.out (original)
+++ hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_s3.q.out Mon Sep 20 20:22:22 2010
@@ -15,23 +15,25 @@ PREHOOK: query: DESCRIBE s3log
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: DESCRIBE s3log
 POSTHOOK: type: DESCTABLE
-bucketowner	string	from deserializer
-bucketname	string	from deserializer
-rdatetime	string	from deserializer
-rip	string	from deserializer
-requester	string	from deserializer
-requestid	string	from deserializer
-operation	string	from deserializer
-rkey	string	from deserializer
-requesturi	string	from deserializer
-httpstatus	int	from deserializer
-errorcode	string	from deserializer
-bytessent	int	from deserializer
-objsize	int	from deserializer
-totaltime	int	from deserializer
-turnaroundtime	int	from deserializer
-referer	string	from deserializer
-useragent	string	from deserializer
+col_name            	data_type           	comment             
+	 	 
+bucketowner         	string              	from deserializer   
+bucketname          	string              	from deserializer   
+rdatetime           	string              	from deserializer   
+rip                 	string              	from deserializer   
+requester           	string              	from deserializer   
+requestid           	string              	from deserializer   
+operation           	string              	from deserializer   
+rkey                	string              	from deserializer   
+requesturi          	string              	from deserializer   
+httpstatus          	int                 	from deserializer   
+errorcode           	string              	from deserializer   
+bytessent           	int                 	from deserializer   
+objsize             	int                 	from deserializer   
+totaltime           	int                 	from deserializer   
+turnaroundtime      	int                 	from deserializer   
+referer             	string              	from deserializer   
+useragent           	string              	from deserializer   
 PREHOOK: query: LOAD DATA LOCAL INPATH '../contrib/data/files/s3.log' INTO TABLE s3log
 PREHOOK: type: LOAD
 POSTHOOK: query: LOAD DATA LOCAL INPATH '../contrib/data/files/s3.log' INTO TABLE s3log
@@ -40,11 +42,11 @@ POSTHOOK: Output: default@s3log
 PREHOOK: query: SELECT a.* FROM s3log a
 PREHOOK: type: QUERY
 PREHOOK: Input: default@s3log
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-18_09-49-50_583_1488535606049303480/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-17_01-52-54_413_2340174263889776647/-mr-10000
 POSTHOOK: query: SELECT a.* FROM s3log a
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@s3log
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-18_09-49-50_583_1488535606049303480/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-17_01-52-54_413_2340174263889776647/-mr-10000
 04ff331638adc13885d6c42059584deabbdeabcd55bf0bee491172a79a87b196	img.zemanta.com	09/Apr/2009:22:00:01 +0000	212.143.99.188	65a011a29cdf8ec533ec3d1ccaae921c	D987234E52141DE7	REST.GET.OBJECT	pixy.gif	GET /pixy.gif?x-id=4560525f-2864-495c-842c-159ede7143f8 HTTP/1.1	200	-	828	828	3	2	http://www.mediafuturist.com/2009/02/marc-andreessen-on-the-charlie-rose-show-talking-about-mobile-technology-video.html	Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.19 (KHTML, like Gecko) Chrome/1.0.154.53 Safari/525.19
 04ff331638adc13885d6c42059584deabbdeabcd55bf0bee491172a79a87b196	img.zemanta.com	09/Apr/2009:22:00:01 +0000	74.244.182.35	65a011a29cdf8ec533ec3d1ccaae921c	626EECA20AB12A5C	REST.GET.OBJECT	pixy.gif	GET /pixy.gif HTTP/1.1	200	-	828	828	20	20	http://trueslant.com/	Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_6; en-us) AppleWebKit/528.16 (KHTML, like Gecko) Version/4 Public Beta Safari/528.16
 04ff331638adc13885d6c42059584deabbdeabcd55bf0bee491172a79a87b196	img.zemanta.com	09/Apr/2009:22:00:02 +0000	62.149.175.120	65a011a29cdf8ec533ec3d1ccaae921c	3E93D70E69292C98	REST.GET.OBJECT	pixy.gif	GET /pixy.gif?x-id=9fec752e-2318-4da3-864e-ac5b9e47c4ae HTTP/1.0	200	-	828	828	4	3	-	-

Modified: hadoop/hive/trunk/hbase-handler/src/test/results/hbase_queries.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/hbase-handler/src/test/results/hbase_queries.q.out?rev=999101&r1=999100&r2=999101&view=diff
==============================================================================
--- hadoop/hive/trunk/hbase-handler/src/test/results/hbase_queries.q.out (original)
+++ hadoop/hive/trunk/hbase-handler/src/test/results/hbase_queries.q.out Mon Sep 20 20:22:22 2010
@@ -17,18 +17,45 @@ PREHOOK: query: DESCRIBE EXTENDED hbase_
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: DESCRIBE EXTENDED hbase_table_1
 POSTHOOK: type: DESCTABLE
-key	int	from deserializer
-value	string	from deserializer
+col_name            	data_type           	comment             
+	 	 
+key                 	int                 	from deserializer   
+value               	string              	from deserializer   
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+Owner:              	thiruvel            	 
+CreateTime:         	Sun Sep 19 23:24:01 PDT 2010	 
+LastAccessTime:     	Wed Dec 31 16:00:00 PST 1969	 
+Protect Mode:       	None                	 
+Retention:          	0                   	 
+Location:           	pfile:/home/thiruvel/projects/hive/hive.unsecure/build/hbase-handler/test/data/warehouse/hbase_table_1	 
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	hbase.table.name    	hbase_table_0       
+	transient_lastDdlTime	1284963841          
+	storage_handler     	org.apache.hadoop.hive.hbase.HBaseStorageHandler
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.hbase.HBaseSerDe	 
+InputFormat:        	org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+	hbase.columns.mapping	cf:string           
 	 	 
-Detailed Table Information	Table(tableName:hbase_table_1, dbName:default, owner:jsichi, createTime:1282870784, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null), FieldSchema(name:value, type:string, comment:null)], location:pfile:/data/users/jsichi/open/hive-trunk/build/hbase-handler/test/data/warehouse/hbase_table_1, inputFormat:org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat, outputFormat:org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.hbase.HBaseSerDe, parameters:{serialization.format=1, hbase.columns.mapping=cf:string}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{hbase.table.name=hbase_table_0, transient_lastDdlTime=1282870784, storage_handler=org.apache.hadoop.hive.hbase.HBaseStorageHandler}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: select * from hbase_table_1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_1
-PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-59-44_959_664484390867638581/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-01_683_4327611959875667509/-mr-10000
 POSTHOOK: query: select * from hbase_table_1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_1
-POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-59-44_959_664484390867638581/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-01_683_4327611959875667509/-mr-10000
 PREHOOK: query: EXPLAIN FROM src INSERT OVERWRITE TABLE hbase_table_1 SELECT * WHERE (key%2)=0
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN FROM src INSERT OVERWRITE TABLE hbase_table_1 SELECT * WHERE (key%2)=0
@@ -198,7 +225,7 @@ STAGE PLANS:
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/tmp/jsichi/hive_2010-08-26_17-59-48_518_6224953898347253125/-mr-10002 
+        file:/tmp/thiruvel/hive_2010-09-19_23-24-05_761_4987544134453195852/-mr-10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -237,7 +264,7 @@ ORDER BY key, value LIMIT 20
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_1
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-59-48_687_7267893285176645949/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-05_861_535022589520098382/-mr-10000
 POSTHOOK: query: SELECT Y.* 
 FROM 
 (SELECT hbase_table_1.* FROM hbase_table_1) x
@@ -248,7 +275,7 @@ ORDER BY key, value LIMIT 20
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_1
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-59-48_687_7267893285176645949/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-05_861_535022589520098382/-mr-10000
 0	val_0
 0	val_0
 0	val_0
@@ -382,7 +409,7 @@ STAGE PLANS:
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/tmp/jsichi/hive_2010-08-26_17-59-56_537_4371415923720310528/-mr-10002 
+        file:/tmp/thiruvel/hive_2010-09-19_23-24-12_397_6864973420568576681/-mr-10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -420,7 +447,7 @@ ORDER BY key,value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_1
 PREHOOK: Input: default@hbase_table_2
-PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-59-56_729_1646077641371437252/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-12_547_6310920357243853243/-mr-10000
 POSTHOOK: query: SELECT Y.*
 FROM 
 (SELECT hbase_table_1.* FROM hbase_table_1 WHERE hbase_table_1.key > 100) x
@@ -431,7 +458,7 @@ ORDER BY key,value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_1
 POSTHOOK: Input: default@hbase_table_2
-POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_17-59-56_729_1646077641371437252/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-12_547_6310920357243853243/-mr-10000
 104	val_104
 114	val_114
 116	val_116
@@ -462,48 +489,48 @@ PREHOOK: query: select * from (select co
 PREHOOK: type: QUERY
 PREHOOK: Input: default@empty_hbase_table
 PREHOOK: Input: default@empty_normal_table
-PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-07_075_2779968526096451886/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-21_501_1398129784538806928/-mr-10000
 POSTHOOK: query: select * from (select count(1) as c from empty_normal_table union all select count(1) as c from empty_hbase_table) x order by c
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@empty_hbase_table
 POSTHOOK: Input: default@empty_normal_table
-POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-07_075_2779968526096451886/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-21_501_1398129784538806928/-mr-10000
 0
 0
 PREHOOK: query: select * from (select count(1) c from empty_normal_table union all select count(1) as c from hbase_table_1) x order by c
 PREHOOK: type: QUERY
 PREHOOK: Input: default@empty_normal_table
 PREHOOK: Input: default@hbase_table_1
-PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-16_419_1002434022290170784/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-29_194_1717741306846244710/-mr-10000
 POSTHOOK: query: select * from (select count(1) c from empty_normal_table union all select count(1) as c from hbase_table_1) x order by c
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@empty_normal_table
 POSTHOOK: Input: default@hbase_table_1
-POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-16_419_1002434022290170784/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-29_194_1717741306846244710/-mr-10000
 0
 155
 PREHOOK: query: select * from (select count(1) c from src union all select count(1) as c from empty_hbase_table) x order by c
 PREHOOK: type: QUERY
 PREHOOK: Input: default@empty_hbase_table
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-25_859_2570695575641705647/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-36_929_1877869894302110012/-mr-10000
 POSTHOOK: query: select * from (select count(1) c from src union all select count(1) as c from empty_hbase_table) x order by c
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@empty_hbase_table
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-25_859_2570695575641705647/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-36_929_1877869894302110012/-mr-10000
 0
 500
 PREHOOK: query: select * from (select count(1) c from src union all select count(1) as c from hbase_table_1) x order by c
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_1
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-35_201_7212376066806846352/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-44_477_2750297097097131407/-mr-10000
 POSTHOOK: query: select * from (select count(1) c from src union all select count(1) as c from hbase_table_1) x order by c
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_1
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-35_201_7212376066806846352/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-44_477_2750297097097131407/-mr-10000
 155
 500
 PREHOOK: query: CREATE TABLE hbase_table_3(key int, value string, count int) 
@@ -703,20 +730,20 @@ POSTHOOK: Output: default@hbase_table_3
 PREHOOK: query: select count(1) from hbase_table_3
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_3
-PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-55_042_4227305362165776181/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-01_253_8576430709310957378/-mr-10000
 POSTHOOK: query: select count(1) from hbase_table_3
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_3
-POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-55_042_4227305362165776181/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-01_253_8576430709310957378/-mr-10000
 155
 PREHOOK: query: select * from hbase_table_3 order by key, value limit 5
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_3
-PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-58_660_6615166785753648742/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-04_391_6395645615304876336/-mr-10000
 POSTHOOK: query: select * from hbase_table_3 order by key, value limit 5
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_3
-POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-00-58_660_6615166785753648742/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-04_391_6395645615304876336/-mr-10000
 0	val_0	3
 2	val_2	1
 4	val_4	1
@@ -725,11 +752,11 @@ POSTHOOK: Output: file:/tmp/jsichi/hive_
 PREHOOK: query: select key, count from hbase_table_3 order by key, count desc limit 5
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_3
-PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-02_229_7032023633895650392/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-07_505_7507809148698442371/-mr-10000
 POSTHOOK: query: select key, count from hbase_table_3 order by key, count desc limit 5
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_3
-POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-02_229_7032023633895650392/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-07_505_7507809148698442371/-mr-10000
 0	3
 2	1
 4	1
@@ -765,11 +792,11 @@ POSTHOOK: Output: default@hbase_table_4
 PREHOOK: query: SELECT * FROM hbase_table_4 ORDER BY key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_4
-PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-11_244_2234692314536262670/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-16_096_5831356591320350016/-mr-10000
 POSTHOOK: query: SELECT * FROM hbase_table_4 ORDER BY key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_4
-POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-11_244_2234692314536262670/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-16_096_5831356591320350016/-mr-10000
 98	val_98	99	100
 100	val_100	101	102
 PREHOOK: query: DROP TABLE hbase_table_5
@@ -790,11 +817,11 @@ POSTHOOK: Output: default@hbase_table_5
 PREHOOK: query: SELECT * FROM hbase_table_5 ORDER BY key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_5
-PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-15_009_1646727916221485033/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-19_335_8952484238655135527/-mr-10000
 POSTHOOK: query: SELECT * FROM hbase_table_5 ORDER BY key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_5
-POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-15_009_1646727916221485033/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-19_335_8952484238655135527/-mr-10000
 98	{"b":"val_98","c":"99"}
 100	{"b":"val_100","c":"101"}
 PREHOOK: query: DROP TABLE hbase_table_6
@@ -827,11 +854,11 @@ POSTHOOK: Output: default@hbase_table_6
 PREHOOK: query: SELECT * FROM hbase_table_6 ORDER BY key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_6
-PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-23_977_5712476856221236690/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-28_089_7578738986294123292/-mr-10000
 POSTHOOK: query: SELECT * FROM hbase_table_6 ORDER BY key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_6
-POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-23_977_5712476856221236690/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-28_089_7578738986294123292/-mr-10000
 98	{"val_98":"98"}
 100	{"val_100":"100"}
 PREHOOK: query: DROP TABLE hbase_table_7
@@ -866,11 +893,11 @@ POSTHOOK: Output: default@hbase_table_7
 PREHOOK: query: SELECT * FROM hbase_table_7 ORDER BY key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_7
-PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-32_885_9063305271253836242/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-36_674_7610811991249617421/-mr-10000
 POSTHOOK: query: SELECT * FROM hbase_table_7 ORDER BY key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_7
-POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-32_885_9063305271253836242/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-36_674_7610811991249617421/-mr-10000
 {"VAL_98":"99.0","val_98":"98"}	98
 {"VAL_100":"101.0","val_100":"100"}	100
 PREHOOK: query: DROP TABLE hbase_table_8
@@ -903,11 +930,11 @@ POSTHOOK: Output: default@hbase_table_8
 PREHOOK: query: SELECT * FROM hbase_table_8 ORDER BY key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_8
-PREHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-41_786_3914765665154306140/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-45_283_2749724644026569867/-mr-10000
 POSTHOOK: query: SELECT * FROM hbase_table_8 ORDER BY key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_8
-POSTHOOK: Output: file:/tmp/jsichi/hive_2010-08-26_18-01-41_786_3914765665154306140/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-45_283_2749724644026569867/-mr-10000
 98	val_98	99	100
 100	val_100	101	102
 PREHOOK: query: DROP TABLE hbase_table_1

Modified: hadoop/hive/trunk/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java?rev=999101&r1=999100&r2=999101&view=diff
==============================================================================
--- hadoop/hive/trunk/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java (original)
+++ hadoop/hive/trunk/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java Mon Sep 20 20:22:22 2010
@@ -121,10 +121,10 @@ public class TestHWISessionManager exten
 
     ArrayList<ArrayList<String>> searchBlockRes = searchItem.getResultBucket();
 
-    String resLine = searchBlockRes.get(0).get(0);
+    String resLine = searchBlockRes.get(0).get(2);
     assertEquals(true, resLine.contains("key"));
     assertEquals(true, resLine.contains("int"));
-    String resLine2 = searchBlockRes.get(0).get(1);
+    String resLine2 = searchBlockRes.get(0).get(3);
     assertEquals(true, resLine2.contains("value"));
     assertEquals(true, resLine2.contains("string"));
 

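The index shift above follows directly from the new layout: the result bucket for DESCRIBE now starts with the col_name/data_type/comment header row plus a blank separator row before the first column row. Sketched (row contents assume the test's usual key int / value string table with no column comments):

    // searchBlockRes.get(0) for DESCRIBE after HIVE-558:
    //   [0] "col_name            \tdata_type           \tcomment             "
    //   [1] "\t \t "              // blank separator row
    //   [2] "key                 \tint                 \tNone                "
    //   [3] "value               \tstring              \tNone                "
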
Modified: hadoop/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java?rev=999101&r1=999100&r2=999101&view=diff
==============================================================================
--- hadoop/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (original)
+++ hadoop/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java Mon Sep 20 20:22:22 2010
@@ -475,13 +475,15 @@ public class TestJdbcDriver extends Test
     ResultSet res = stmt.executeQuery("describe " + tableName);
 
     res.next();
-    assertEquals("Column name 'key' not found", "key", res.getString(1));
+    res.next();
+    res.next();
+    assertEquals("Column name 'key' not found", "key", res.getString(1).trim());
     assertEquals("Column type 'int' for column key not found", "int", res
-        .getString(2));
+        .getString(2).trim());
     res.next();
-    assertEquals("Column name 'value' not found", "value", res.getString(1));
+    assertEquals("Column name 'value' not found", "value", res.getString(1).trim());
     assertEquals("Column type 'string' for column key not found", "string", res
-        .getString(2));
+        .getString(2).trim());
 
     assertFalse("More results found than expected", res.next());
 

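For JDBC clients generally, the updated test shows the pattern the new format requires: skip the header and separator rows, then trim each field, since values are left-padded to a 20-character column width. A minimal sketch (stmt and tableName as in the test; no other driver API changes assumed):

    ResultSet res = stmt.executeQuery("describe " + tableName);
    res.next();                              // header: col_name, data_type, comment
    res.next();                              // blank separator row
    while (res.next()) {
      String name = res.getString(1).trim(); // strip the 20-char padding
      String type = res.getString(2).trim();
      // name/type now compare cleanly against "key", "int", etc.
    }
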
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=999101&r1=999100&r2=999101&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Mon Sep 20 20:22:22 2010
@@ -68,14 +68,7 @@ import org.apache.hadoop.hive.ql.Context
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.metadata.CheckResult;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.HiveMetaStoreChecker;
-import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
-import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
-import org.apache.hadoop.hive.ql.metadata.Partition;
-import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.metadata.*;
 import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
@@ -1615,64 +1608,31 @@ public class DDLTask extends Task<DDLWor
 
       LOG.info("DDLTask: got data for " + tbl.getTableName());
 
-      List<FieldSchema> cols = null;
+			Path resFile = new Path(descTbl.getResFile());
+			FileSystem fs = resFile.getFileSystem(conf);
+			DataOutput outStream = fs.create(resFile);
+
       if (colPath.equals(tableName)) {
-        cols = tbl.getCols();
-        if (part != null) {
-          cols = part.getCols();
-        }
+				outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(tbl));
       } else {
+				List<FieldSchema> cols = null;
         cols = Hive.getFieldsFromDeserializer(colPath, tbl.getDeserializer());
-      }
-      Path resFile = new Path(descTbl.getResFile());
-      FileSystem fs = resFile.getFileSystem(conf);
-      DataOutput outStream = fs.create(resFile);
-      Iterator<FieldSchema> iterCols = cols.iterator();
-      while (iterCols.hasNext()) {
-        // create a row per column
-        FieldSchema col = iterCols.next();
-        outStream.writeBytes(col.getName());
-        outStream.write(separator);
-        outStream.writeBytes(col.getType());
-        outStream.write(separator);
-        outStream.writeBytes(col.getComment() == null ? "" : col.getComment());
-        outStream.write(terminator);
+				outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(cols));
       }
 
       if (tableName.equals(colPath)) {
-        // also return the partitioning columns
-        List<FieldSchema> partCols = tbl.getPartCols();
-        Iterator<FieldSchema> iterPartCols = partCols.iterator();
-        while (iterPartCols.hasNext()) {
-          FieldSchema col = iterPartCols.next();
-          outStream.writeBytes(col.getName());
-          outStream.write(separator);
-          outStream.writeBytes(col.getType());
-          outStream.write(separator);
-          outStream
-              .writeBytes(col.getComment() == null ? "" : col.getComment());
-          outStream.write(terminator);
-        }
-
         // if extended desc table then show the complete details of the table
         if (descTbl.isExt()) {
           // add empty line
           outStream.write(terminator);
           if (part != null) {
             // show partition information
-            outStream.writeBytes("Detailed Partition Information");
-            outStream.write(separator);
-            outStream.writeBytes(part.getTPartition().toString());
-            outStream.write(separator);
+            outStream.writeBytes(MetaDataFormatUtils.getPartitionInformation(part));
             // comment column is empty
             outStream.write(terminator);
           } else {
             // show table information
-            outStream.writeBytes("Detailed Table Information");
-            outStream.write(separator);
-            outStream.writeBytes(tbl.getTTable().toString());
-            outStream.write(separator);
-            // comment column is empty
+            outStream.writeBytes(MetaDataFormatUtils.getTableInformation(tbl));
             outStream.write(terminator);
           }
         }

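Read as straight-line code, the describe path in DDLTask now amounts to the following (a consolidation of the added lines above; exception handling and stream shutdown are unchanged from the surrounding method):

    Path resFile = new Path(descTbl.getResFile());
    FileSystem fs = resFile.getFileSystem(conf);
    DataOutput outStream = fs.create(resFile);

    if (colPath.equals(tableName)) {
      // table columns, plus a "# Partition Information" block
      // when the table is partitioned
      outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(tbl));
      if (descTbl.isExt()) {
        outStream.write(terminator);  // blank line before the detail section
        outStream.writeBytes(part != null
            ? MetaDataFormatUtils.getPartitionInformation(part)
            : MetaDataFormatUtils.getTableInformation(tbl));
        outStream.write(terminator);
      }
    } else {
      List<FieldSchema> cols =
          Hive.getFieldsFromDeserializer(colPath, tbl.getDeserializer());
      outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(cols));
    }
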
Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java?rev=999101&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java Mon Sep 20 20:22:22 2010
@@ -0,0 +1,211 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.metadata;
+
+import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.ql.plan.DescTableDesc;
+
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * This class provides methods to format table information.
+ *
+ */
+public final class MetaDataFormatUtils {
+
+  public static final String FIELD_DELIM = "\t";
+  public static final String LINE_DELIM = "\n";
+
+  private static final int DEFAULT_STRINGBUILDER_SIZE = 2048;
+  private static final int ALIGNMENT = 20;
+
+  private MetaDataFormatUtils() {
+  }
+
+  public static String getAllColumnsInformation(Table table) {
+
+    StringBuilder columnInformation = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
+    formatColumnsHeader(columnInformation);
+    formatAllFields(columnInformation, table.getCols());
+
+    // Partitions
+    if (table.isPartitioned()) {
+      columnInformation.append(LINE_DELIM).append("# Partition Information")
+          .append(LINE_DELIM);
+      formatColumnsHeader(columnInformation);
+      formatAllFields(columnInformation, table.getPartCols());
+    }
+    return columnInformation.toString();
+  }
+
+  private static void formatColumnsHeader(StringBuilder columnInformation) {
+    formatOutput(getColumnsHeader(), columnInformation);
+    columnInformation.append(LINE_DELIM);
+  }
+
+  public static String getAllColumnsInformation(List<FieldSchema> cols) {
+    StringBuilder columnInformation = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
+    formatColumnsHeader(columnInformation);
+    formatAllFields(columnInformation, cols);
+    return columnInformation.toString();
+  }
+
+  private static void formatAllFields(StringBuilder tableInfo, List<FieldSchema> cols) {
+    for (FieldSchema col : cols) {
+      formatFieldSchemas(tableInfo, col);
+    }
+  }
+
+  public static String getPartitionInformation(Partition part) {
+    StringBuilder tableInfo = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
+
+    // Table Metadata
+    tableInfo.append("# Detailed Partition Information").append(LINE_DELIM);
+    getPartitionMetaDataInformation(tableInfo, part);
+
+    // Storage information.
+    tableInfo.append(LINE_DELIM).append("# Storage Information").append(LINE_DELIM);
+    getStorageDescriptorInfo(tableInfo, part.getTPartition().getSd());
+
+    return tableInfo.toString();
+  }
+
+  public static String getTableInformation(Table table) {
+    StringBuilder tableInfo = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
+
+    // Table Metadata
+    tableInfo.append("# Detailed Table Information").append(LINE_DELIM);
+    getTableMetaDataInformation(tableInfo, table);
+
+    // Storage information.
+    tableInfo.append(LINE_DELIM).append("# Storage Information").append(LINE_DELIM);
+    getStorageDescriptorInfo(tableInfo, table.getTTable().getSd());
+
+    if (table.isView()) {
+      tableInfo.append(LINE_DELIM).append("# View Information").append(LINE_DELIM);
+      getViewInfo(tableInfo, table);
+    }
+
+    return tableInfo.toString();
+  }
+
+  private static void getViewInfo(StringBuilder tableInfo, Table tbl) {
+    formatOutput("View Original Text:", tbl.getViewOriginalText(), tableInfo);
+    formatOutput("View Expanded Text:", tbl.getViewExpandedText(), tableInfo);
+  }
+
+  private static void getStorageDescriptorInfo(StringBuilder tableInfo,
+                                               StorageDescriptor storageDesc) {
+
+    formatOutput("SerDe Library:", storageDesc.getSerdeInfo().getSerializationLib(), tableInfo);
+    formatOutput("InputFormat:", storageDesc.getInputFormat(), tableInfo);
+    formatOutput("OutputFormat:", storageDesc.getOutputFormat(), tableInfo);
+    formatOutput("Compressed:", storageDesc.isCompressed() ? "Yes" : "No", tableInfo);
+    formatOutput("Num Buckets:", String.valueOf(storageDesc.getNumBuckets()), tableInfo);
+    formatOutput("Bucket Columns:", storageDesc.getBucketCols().toString(), tableInfo);
+    formatOutput("Sort Columns:", storageDesc.getSortCols().toString(), tableInfo);
+
+    if (storageDesc.getSerdeInfo().getParametersSize() > 0) {
+      tableInfo.append("Storage Desc Params:").append(LINE_DELIM);
+      displayAllParameters(storageDesc.getSerdeInfo().getParameters(), tableInfo);
+    }
+  }
+
+  private static void getTableMetaDataInformation(StringBuilder tableInfo, Table  tbl) {
+    formatOutput("Database:", tbl.getDbName(), tableInfo);
+    formatOutput("Owner:", tbl.getOwner(), tableInfo);
+    formatOutput("CreateTime:", formatDate(tbl.getTTable().getCreateTime()), tableInfo);
+    formatOutput("LastAccessTime:", formatDate(tbl.getTTable().getLastAccessTime()), tableInfo);
+    String protectMode = tbl.getProtectMode().toString();
+    formatOutput("Protect Mode:", protectMode == null ? "None" : protectMode, tableInfo);
+    formatOutput("Retention:", Integer.toString(tbl.getRetention()), tableInfo);
+    if (!tbl.isView()) {
+      formatOutput("Location:", tbl.getDataLocation().toString(), tableInfo);
+    }
+    formatOutput("Table Type:", tbl.getTableType().name(), tableInfo);
+
+    if (tbl.getParameters().size() > 0) {
+      tableInfo.append("Table Parameters:").append(LINE_DELIM);
+      displayAllParameters(tbl.getParameters(), tableInfo);
+    }
+  }
+
+  private static void getPartitionMetaDataInformation(StringBuilder tableInfo, Partition part) {
+    formatOutput("Partition Value:", part.getValues().toString(), tableInfo);
+    formatOutput("Database:", part.getTPartition().getDbName(), tableInfo);
+    formatOutput("Table:", part.getTable().getTableName(), tableInfo);
+    formatOutput("CreateTime:", formatDate(part.getTPartition().getCreateTime()), tableInfo);
+    formatOutput("LastAccessTime:", formatDate(part.getTPartition().getLastAccessTime()),
+        tableInfo);
+    String protectMode = part.getProtectMode().toString();
+    formatOutput("Protect Mode:", protectMode == null ? "None" : protectMode, tableInfo);
+    formatOutput("Location:", part.getLocation(), tableInfo);
+
+    if (part.getTPartition().getParameters().size() > 0) {
+      tableInfo.append("Partition Parameters:").append(LINE_DELIM);
+      displayAllParameters(part.getTPartition().getParameters(), tableInfo);
+    }
+  }
+
+  private static void displayAllParameters(Map<String, String> params, StringBuilder tableInfo) {
+    for (Map.Entry<String, String> parameter: params.entrySet()) {
+      tableInfo.append(FIELD_DELIM); // Ensures all params are indented.
+      formatOutput(parameter.getKey(), StringEscapeUtils.escapeJava(parameter.getValue()),
+          tableInfo);
+    }
+  }
+
+  private static void formatFieldSchemas(StringBuilder tableInfo, FieldSchema col) {
+    String comment = col.getComment() != null ? col.getComment() : "None";
+    formatOutput(col.getName(), col.getType(), comment, tableInfo);
+  }
+
+  private static String formatDate(long timeInSeconds) {
+    Date date = new Date(timeInSeconds * 1000);
+    return date.toString();
+  }
+
+  private static void formatOutput(String[] fields, StringBuilder tableInfo) {
+    for (String field : fields) {
+      tableInfo.append(String.format("%-" + ALIGNMENT + "s", field)).append(FIELD_DELIM);
+    }
+    tableInfo.append(LINE_DELIM);
+  }
+
+  private static void formatOutput(String name, String value,
+                                   StringBuilder tableInfo) {
+    tableInfo.append(String.format("%-" + ALIGNMENT + "s", name)).append(FIELD_DELIM);
+    tableInfo.append(String.format("%-" + ALIGNMENT + "s", value)).append(LINE_DELIM);
+  }
+
+  private static void formatOutput(String col1, String col2, String col3,
+                                   StringBuilder tableInfo) {
+    tableInfo.append(String.format("%-" + ALIGNMENT + "s", col1)).append(FIELD_DELIM);
+    tableInfo.append(String.format("%-" + ALIGNMENT + "s", col2)).append(FIELD_DELIM);
+    tableInfo.append(String.format("%-" + ALIGNMENT + "s", col3)).append(LINE_DELIM);
+  }
+
+  public static String[] getColumnsHeader() {
+    return DescTableDesc.getSchema().split("#")[0].split(",");
+  }
+}

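The 20-character alignment in formatOutput() is plain String.format left-justification, which is what forces the trim() calls in the JDBC and HWI tests above; a standalone illustration:

    // %-20s left-justifies and pads to ALIGNMENT (20) characters:
    String cell = String.format("%-20s", "Database:"); // "Database:           "
    // a metadata row therefore renders as
    //   "Database:           \tdefault             \t"
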
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=999101&r1=999100&r2=999101&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Mon Sep 20 20:22:22 2010
@@ -792,7 +792,7 @@ public class DDLSemanticAnalyzer extends
         partSpec, isExt);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         descTblDesc), conf));
-    setFetchTask(createFetchTask(descTblDesc.getSchema()));
+    setFetchTask(createFetchTask(DescTableDesc.getSchema()));
     LOG.info("analyzeDescribeTable done");
   }
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java?rev=999101&r1=999100&r2=999101&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java Mon Sep 20 20:22:22 2010
@@ -69,7 +69,7 @@ public class DescTableDesc extends DDLDe
     return table;
   }
 
-  public String getSchema() {
+  public static String getSchema() {
     return schema;
   }
 

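Making getSchema() static matches how it is now consumed: DDLSemanticAnalyzer (above) and MetaDataFormatUtils.getColumnsHeader() both need the describe schema without a DescTableDesc instance, and the schema field was already a fixed constant. Assuming the constant follows Hive's usual "names#types" shape, i.e. "col_name,data_type,comment#string:string:string" (the column names are confirmed by the .q.out headers above; the type suffix is an assumption):

    // getColumnsHeader() derives the header row from the schema constant:
    //   "col_name,data_type,comment#string:string:string"
    //       .split("#")[0]   -> "col_name,data_type,comment"
    //       .split(",")      -> { "col_name", "data_type", "comment" }
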
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=999101&r1=999100&r2=999101&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Mon Sep 20 20:22:22 2010
@@ -884,8 +884,12 @@ public class QTestUtil {
         "-I", "invalidscheme:",
         "-I", "lastUpdateTime",
         "-I", "lastAccessTime",
-        "-I", "owner",
+        "-I", "[Oo]wner",
+        "-I", "CreateTime",
+        "-I", "LastAccessTime",
+        "-I", "Location",
         "-I", "transient_lastDdlTime",
+        "-I", "last_modified_",
         "-I", "java.lang.RuntimeException",
         "-I", "at org",
         "-I", "at sun",

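The added -I patterns mask the newly verbose, machine-specific lines in the output (owner, create/access times, warehouse locations, last_modified_* parameters) so the golden-file comparisons stay deterministic. QTestUtil hands these straight to the diff it shells out to, where -I is GNU diff's --ignore-matching-lines; the effect is roughly:

    diff -a -I '[Oo]wner' -I 'CreateTime' -I 'LastAccessTime' -I 'Location' \
        -I 'transient_lastDdlTime' -I 'last_modified_' \
        expected.q.out actual.q.out     # file names illustrative
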
Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part_no_drop.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part_no_drop.q.out?rev=999101&r1=999100&r2=999101&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part_no_drop.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part_no_drop.q.out Mon Sep 20 20:22:22 2010
@@ -32,11 +32,41 @@ PREHOOK: query: desc extended tbl_protec
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: desc extended tbl_protectmode_no_drop partition (p='p1')
 POSTHOOK: type: DESCTABLE
-c1	string	
-c2	string	
-p	string	
+col_name            	data_type           	comment             
+	 	 
+c1                  	string              	None                
+c2                  	string              	None                
+	 	 
+# Partition Information	 	 
+col_name            	data_type           	comment             
+	 	 
+p                   	string              	None                
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[p1]                	 
+Database:           	default             	 
+Table:              	tbl_protectmode_no_drop	 
+CreateTime:         	Tue Sep 14 08:27:42 PDT 2010	 
+LastAccessTime:     	Wed Dec 31 16:00:00 PST 1969	 
+Protect Mode:       	NO_DROP             	 
+Location:           	pfile:/home/thiruvel/projects/hive/hive/build/ql/test/data/warehouse/tbl_protectmode_no_drop/p=p1	 
+Partition Parameters:	 	 
+	last_modified_by    	thiruvel            
+	last_modified_time  	1284478062          
+	PROTECT_MODE        	NO_DROP             
+	transient_lastDdlTime	1284478062          
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
 	 	 
-Detailed Partition Information	Partition(values:[p1], dbName:default, tableName:tbl_protectmode_no_drop, createTime:1284168328, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:c1, type:string, comment:null), FieldSchema(name:c2, type:string, comment:null)], location:pfile:/data/users/sdong/www/trunk/VENDOR.hive/trunk/build/ql/test/data/warehouse/tbl_protectmode_no_drop/p=p1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{last_modified_by=sdong, last_modified_time=1284168328, PROTECT_MODE=NO_DROP, transient_lastDdlTime=1284168328})	
 PREHOOK: query: alter table tbl_protectmode_no_drop drop partition (p='p1')
 PREHOOK: type: ALTERTABLE_DROPPARTS
 PREHOOK: Input: default@tbl_protectmode_no_drop

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl2.q.out?rev=999101&r1=999100&r2=999101&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl2.q.out Mon Sep 20 20:22:22 2010
@@ -46,8 +46,39 @@ PREHOOK: query: desc extended tbl_protec
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: desc extended tbl_protectmode2
 POSTHOOK: type: DESCTABLE
-col	string	
-p	string	
+col_name            	data_type           	comment             
+	 	 
+col                 	string              	None                
+	 	 
+# Partition Information	 	 
+col_name            	data_type           	comment             
+	 	 
+p                   	string              	None                
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+Owner:              	thiruvel            	 
+CreateTime:         	Tue Sep 14 08:28:04 PDT 2010	 
+LastAccessTime:     	Wed Dec 31 16:00:00 PST 1969	 
+Protect Mode:       	OFFLINE             	 
+Retention:          	0                   	 
+Location:           	pfile:/home/thiruvel/projects/hive/hive/build/ql/test/data/warehouse/tbl_protectmode2	 
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	last_modified_by    	thiruvel            
+	last_modified_time  	1284478084          
+	PROTECT_MODE        	OFFLINE             
+	transient_lastDdlTime	1284478084          
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
 	 	 
-Detailed Table Information	Table(tableName:tbl_protectmode2, dbName:default, owner:njain, createTime:1282025357, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl_protectmode2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:p, type:string, comment:null)], parameters:{last_modified_by=njain, last_modified_time=1282025358, PROTECT_MODE=OFFLINE, transient_lastDdlTime=1282025358}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 FAILED: Error in semantic analysis: Query against an offline table or partition Table tbl_protectmode2
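
The reworked golden output above comes from the MetaDataFormatUtils class added in this commit: each describe field is padded to a fixed-width column and the columns are joined with tabs. Below is a minimal, self-contained sketch of that formatting idea; the class name, constant, and helper are hypothetical illustrations, not the actual MetaDataFormatUtils code.

    // Illustrative sketch only -- not the committed MetaDataFormatUtils code.
    // It shows how rows like
    //   "col_name            \tdata_type           \tcomment             "
    // in the .q.out diffs above could be produced.
    public class DescribeFormatSketch {

      private static final int COLUMN_WIDTH = 20; // width visible in the new output

      // Pad a value to the fixed column width. Values longer than the width
      // (e.g. SerDe class names) are emitted unpadded, as in the diffs.
      private static String pad(String value) {
        if (value == null) {
          value = "None"; // the new output prints "None" for null comments
        }
        if (value.length() >= COLUMN_WIDTH) {
          return value;
        }
        return String.format("%1$-" + COLUMN_WIDTH + "s", value);
      }

      // Render one three-column, tab-separated row.
      public static String row(String c1, String c2, String c3) {
        return pad(c1) + "\t" + pad(c2) + "\t" + pad(c3);
      }

      public static void main(String[] args) {
        StringBuilder out = new StringBuilder();
        out.append(row("col_name", "data_type", "comment")).append('\n');
        out.append("\t \t \n"); // blank spacer row used between sections
        out.append(row("col", "string", null)).append('\n');
        System.out.print(out);
      }
    }

Running the sketch prints the same header row, spacer row, and column row seen at the top of each new-format hunk, with "None" standing in for a null column comment.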

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl3.q.out?rev=999101&r1=999100&r2=999101&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl3.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl3.q.out Mon Sep 20 20:22:22 2010
@@ -14,11 +14,11 @@ POSTHOOK: Output: default@tbl_protectmod
 PREHOOK: query: select col from tbl_protectmode_4
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tbl_protectmode_4
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-09-18_644_2317445421479072812/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-14_08-28-23_139_4654751823762752374/-mr-10000
 POSTHOOK: query: select col from tbl_protectmode_4
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tbl_protectmode_4
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-09-18_644_2317445421479072812/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-14_08-28-23_139_4654751823762752374/-mr-10000
 PREHOOK: query: alter table tbl_protectmode_4 enable offline
 PREHOOK: type: ALTERTABLE_PROTECTMODE
 PREHOOK: Input: default@tbl_protectmode_4
@@ -31,7 +31,34 @@ PREHOOK: query: desc extended tbl_protec
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: desc extended tbl_protectmode_4
 POSTHOOK: type: DESCTABLE
-col	string	
+col_name            	data_type           	comment             
+	 	 
+col                 	string              	None                
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+Owner:              	thiruvel            	 
+CreateTime:         	Tue Sep 14 08:28:23 PDT 2010	 
+LastAccessTime:     	Wed Dec 31 16:00:00 PST 1969	 
+Protect Mode:       	OFFLINE             	 
+Retention:          	0                   	 
+Location:           	pfile:/home/thiruvel/projects/hive/hive/build/ql/test/data/warehouse/tbl_protectmode_4	 
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	last_modified_by    	thiruvel            
+	last_modified_time  	1284478106          
+	PROTECT_MODE        	OFFLINE             
+	transient_lastDdlTime	1284478106          
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
 	 	 
-Detailed Table Information	Table(tableName:tbl_protectmode_4, dbName:default, owner:njain, createTime:1282025358, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl_protectmode_4, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{last_modified_by=njain, last_modified_time=1282025361, PROTECT_MODE=OFFLINE, transient_lastDdlTime=1282025361}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 FAILED: Error in semantic analysis: Query against an offline table or partition Table tbl_protectmode_4
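
The parameter blocks in the new output (Table Parameters:, Storage Desc Params:) render each metastore parameter-map entry as an indented key/value row, e.g. "\tPROTECT_MODE        \tOFFLINE             ". A sketch of that rendering under the same assumptions as above (hypothetical names; the committed logic lives in the MetaDataFormatUtils.java added by this change):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class ParamBlockSketch {

      // Render a parameter map as rows of the form
      // "\t<key padded to 20>\t<value padded to 20>". Keys longer than the
      // width, such as transient_lastDdlTime, pass through unpadded, matching
      // the diffs above.
      public static String paramBlock(String header, Map<String, String> params) {
        StringBuilder sb = new StringBuilder(header).append(":\t \t \n");
        for (Map.Entry<String, String> e : params.entrySet()) {
          sb.append('\t')
            .append(String.format("%-20s", e.getKey()))
            .append('\t')
            .append(String.format("%-20s", e.getValue()))
            .append('\n');
        }
        return sb.toString();
      }

      public static void main(String[] args) {
        Map<String, String> params = new LinkedHashMap<String, String>();
        params.put("PROTECT_MODE", "OFFLINE");
        params.put("transient_lastDdlTime", "1284478106");
        System.out.print(paramBlock("Table Parameters", params));
      }
    }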

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl4.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl4.q.out?rev=999101&r1=999100&r2=999101&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl4.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl4.q.out Mon Sep 20 20:22:22 2010
@@ -55,8 +55,39 @@ PREHOOK: query: desc extended tbl_protec
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: desc extended tbl_protectmode_tbl4
 POSTHOOK: type: DESCTABLE
-col	string	
-p	string	
+col_name            	data_type           	comment             
+	 	 
+col                 	string              	None                
+	 	 
+# Partition Information	 	 
+col_name            	data_type           	comment             
+	 	 
+p                   	string              	None                
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+Owner:              	thiruvel            	 
+CreateTime:         	Tue Sep 14 08:29:03 PDT 2010	 
+LastAccessTime:     	Wed Dec 31 16:00:00 PST 1969	 
+Protect Mode:       	OFFLINE             	 
+Retention:          	0                   	 
+Location:           	pfile:/home/thiruvel/projects/hive/hive/build/ql/test/data/warehouse/tbl_protectmode_tbl4	 
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	last_modified_by    	thiruvel            
+	last_modified_time  	1284478143          
+	PROTECT_MODE        	OFFLINE             
+	transient_lastDdlTime	1284478143          
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
 	 	 
-Detailed Table Information	Table(tableName:tbl_protectmode_tbl4, dbName:default, owner:njain, createTime:1282025362, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl_protectmode_tbl4, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:p, type:string, comment:null)], parameters:{last_modified_by=njain, last_modified_time=1282025362, PROTECT_MODE=OFFLINE, transient_lastDdlTime=1282025362}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 FAILED: Error in semantic analysis: Query against an offline table or partition Table tbl_protectmode_tbl4

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl5.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl5.q.out?rev=999101&r1=999100&r2=999101&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl5.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl5.q.out Mon Sep 20 20:22:22 2010
@@ -55,8 +55,39 @@ PREHOOK: query: desc extended tbl_protec
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: desc extended tbl_protectmode_tbl5
 POSTHOOK: type: DESCTABLE
-col	string	
-p	string	
+col_name            	data_type           	comment             
+	 	 
+col                 	string              	None                
+	 	 
+# Partition Information	 	 
+col_name            	data_type           	comment             
+	 	 
+p                   	string              	None                
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+Owner:              	thiruvel            	 
+CreateTime:         	Tue Sep 14 08:29:35 PDT 2010	 
+LastAccessTime:     	Wed Dec 31 16:00:00 PST 1969	 
+Protect Mode:       	OFFLINE             	 
+Retention:          	0                   	 
+Location:           	pfile:/home/thiruvel/projects/hive/hive/build/ql/test/data/warehouse/tbl_protectmode_tbl5	 
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	last_modified_by    	thiruvel            
+	last_modified_time  	1284478175          
+	PROTECT_MODE        	OFFLINE             
+	transient_lastDdlTime	1284478175          
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
 	 	 
-Detailed Table Information	Table(tableName:tbl_protectmode_tbl5, dbName:default, owner:njain, createTime:1282025363, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl_protectmode_tbl5, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:p, type:string, comment:null)], parameters:{last_modified_by=njain, last_modified_time=1282025363, PROTECT_MODE=OFFLINE, transient_lastDdlTime=1282025363}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 FAILED: Error in semantic analysis: Query against an offline table or partition Table tbl_protectmode_tbl5

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl_no_drop.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl_no_drop.q.out?rev=999101&r1=999100&r2=999101&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl_no_drop.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl_no_drop.q.out Mon Sep 20 20:22:22 2010
@@ -14,11 +14,11 @@ POSTHOOK: Output: default@tbl_protectmod
 PREHOOK: query: select * from tbl_protectmode__no_drop
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tbl_protectmode__no_drop
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-09-24_500_202960055467493836/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-14_08-39-31_893_2625855232363516041/-mr-10000
 POSTHOOK: query: select * from tbl_protectmode__no_drop
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tbl_protectmode__no_drop
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-09-24_500_202960055467493836/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-14_08-39-31_893_2625855232363516041/-mr-10000
 PREHOOK: query: alter table tbl_protectmode__no_drop enable no_drop
 PREHOOK: type: ALTERTABLE_PROTECTMODE
 PREHOOK: Input: default@tbl_protectmode__no_drop
@@ -31,9 +31,36 @@ PREHOOK: query: desc extended tbl_protec
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: desc extended tbl_protectmode__no_drop
 POSTHOOK: type: DESCTABLE
-col	string	
+col_name            	data_type           	comment             
+	 	 
+col                 	string              	None                
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+Owner:              	thiruvel            	 
+CreateTime:         	Tue Sep 14 08:39:31 PDT 2010	 
+LastAccessTime:     	Wed Dec 31 16:00:00 PST 1969	 
+Protect Mode:       	NO_DROP             	 
+Retention:          	0                   	 
+Location:           	pfile:/home/thiruvel/projects/hive/hive/build/ql/test/data/warehouse/tbl_protectmode__no_drop	 
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	last_modified_by    	thiruvel            
+	last_modified_time  	1284478771          
+	PROTECT_MODE        	NO_DROP             
+	transient_lastDdlTime	1284478771          
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
 	 	 
-Detailed Table Information	Table(tableName:tbl_protectmode__no_drop, dbName:default, owner:njain, createTime:1282025364, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl_protectmode__no_drop, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{last_modified_by=njain, last_modified_time=1282025364, PROTECT_MODE=NO_DROP, transient_lastDdlTime=1282025364}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: drop table tbl_protectmode__no_drop
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@tbl_protectmode__no_drop