You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by nz...@apache.org on 2010/10/13 06:26:38 UTC

svn commit: r1021992 [1/13] - in /hadoop/hive/trunk: ./ contrib/src/test/results/clientpositive/ hbase-handler/src/test/queries/ hbase-handler/src/test/results/ hwi/src/test/org/apache/hadoop/hive/hwi/ jdbc/src/test/org/apache/hadoop/hive/jdbc/ ql/src/...

Author: nzhang
Date: Wed Oct 13 04:26:34 2010
New Revision: 1021992

URL: http://svn.apache.org/viewvc?rev=1021992&view=rev
Log:
HIVE-1658. Fix describe [extended] column formatting (Thiruvel Thirumoolan via Ning Zhang)

Added:
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/describe_table.q
    hadoop/hive/trunk/ql/src/test/results/clientpositive/describe_table.q.out
Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/contrib/src/test/results/clientpositive/fileformat_base64.q.out
    hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_s3.q.out
    hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes.q.out
    hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes2.q.out
    hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes3.q.out
    hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out
    hadoop/hive/trunk/hbase-handler/src/test/queries/hbase_stats.q
    hadoop/hive/trunk/hbase-handler/src/test/results/hbase_queries.q.out
    hadoop/hive/trunk/hbase-handler/src/test/results/hbase_stats.q.out
    hadoop/hive/trunk/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java
    hadoop/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_view.q
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part_no_drop.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl_no_drop.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/alter2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/alter3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/alter4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/alter_partition_format_loc.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/bucket_groupby.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/combine3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_default_prop.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_escape.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_insert_outputformat.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_like.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_nested_type.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_view.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/database.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ddltime.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/describe_xpath.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/fileformat_sequencefile.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/fileformat_text.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/index_creation.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inoutdriver.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input10.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input15.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part10.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join_thrift.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part1.q.out_0.17
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part10.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part11.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part12.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part13.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part14.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part14.q.out_0.17
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/load_dyn_part9.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/protectmode.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rcfile_bigdata.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rcfile_columnar.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rcfile_default_format.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rename_column.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/stats0.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/stats0.q.out_0.17
    hadoop/hive/trunk/ql/src/test/results/clientpositive/stats1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/stats10.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/stats2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/stats3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/stats4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/stats4.q.out_0.17
    hadoop/hive/trunk/ql/src/test/results/clientpositive/stats5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/stats6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/stats7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/stats8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/stats9.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/tablename_with_select.q.out

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=1021992&r1=1021991&r2=1021992&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Wed Oct 13 04:26:34 2010
@@ -367,6 +367,9 @@ Trunk -  Unreleased
     HIVE-1601. Hadoop 0.17 ant test broken by HIVE-1523
     (Joydeep Sen Sarma via jvs)
 
+    HIVE-1658. Fix describe [extended] column formatting
+    (Thiruvel Thirumoolan via Ning Zhang)
+
 Release 0.6.0 -  Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/hive/trunk/contrib/src/test/results/clientpositive/fileformat_base64.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/results/clientpositive/fileformat_base64.q.out?rev=1021992&r1=1021991&r2=1021992&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/results/clientpositive/fileformat_base64.q.out (original)
+++ hadoop/hive/trunk/contrib/src/test/results/clientpositive/fileformat_base64.q.out Wed Oct 13 04:26:34 2010
@@ -44,34 +44,10 @@ PREHOOK: query: DESCRIBE EXTENDED base64
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: DESCRIBE EXTENDED base64_test
 POSTHOOK: type: DESCTABLE
-col_name            	data_type           	comment             
-	 	 
-key                 	int                 	None                
-value               	string              	None                
-	 	 
-# Detailed Table Information	 	 
-Database:           	default             	 
-Owner:              	thiruvel            	 
-CreateTime:         	Fri Sep 17 01:51:29 PDT 2010	 
-LastAccessTime:     	Wed Dec 31 16:00:00 PST 1969	 
-Protect Mode:       	None                	 
-Retention:          	0                   	 
-Location:           	pfile:/home/thiruvel/projects/hive/hive.unsecure/build/contrib/test/data/warehouse/base64_test	 
-Table Type:         	MANAGED_TABLE       	 
-Table Parameters:	 	 
-	transient_lastDdlTime	1284713489          
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
+key	int	
+value	string	
 	 	 
+Detailed Table Information	Table(tableName:base64_test, dbName:default, owner:thiruvel, createTime:1286801447, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null), FieldSchema(name:value, type:string, comment:null)], location:pfile:/home/thiruvel/projects/hive/hive.unsecure/build/contrib/test/data/warehouse/base64_test, inputFormat:org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextInputFormat, outputFormat:org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1286801447}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: FROM src
 INSERT OVERWRITE TABLE base64_test
 SELECT key, value WHERE key < 10
@@ -89,11 +65,11 @@ POSTHOOK: Lineage: base64_test.value SIM
 PREHOOK: query: SELECT * FROM base64_test
 PREHOOK: type: QUERY
 PREHOOK: Input: default@base64_test
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-17_01-51-32_335_619401707200390008/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-50-51_166_5701789276947169864/-mr-10000
 POSTHOOK: query: SELECT * FROM base64_test
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@base64_test
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-17_01-51-32_335_619401707200390008/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-50-51_166_5701789276947169864/-mr-10000
 POSTHOOK: Lineage: base64_test.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: base64_test.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 0	val_0
@@ -131,11 +107,11 @@ POSTHOOK: Lineage: base64_test.value SIM
 PREHOOK: query: SELECT * FROM base64_test
 PREHOOK: type: QUERY
 PREHOOK: Input: default@base64_test
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-17_01-51-34_808_4140189104352013659/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-50-54_569_4775014653889348884/-mr-10000
 POSTHOOK: query: SELECT * FROM base64_test
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@base64_test
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-17_01-51-34_808_4140189104352013659/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-50-54_569_4775014653889348884/-mr-10000
 POSTHOOK: Lineage: base64_test.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: base64_test.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: base64_test.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]

Modified: hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_s3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_s3.q.out?rev=1021992&r1=1021991&r2=1021992&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_s3.q.out (original)
+++ hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_s3.q.out Wed Oct 13 04:26:34 2010
@@ -15,25 +15,23 @@ PREHOOK: query: DESCRIBE s3log
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: DESCRIBE s3log
 POSTHOOK: type: DESCTABLE
-col_name            	data_type           	comment             
-	 	 
-bucketowner         	string              	from deserializer   
-bucketname          	string              	from deserializer   
-rdatetime           	string              	from deserializer   
-rip                 	string              	from deserializer   
-requester           	string              	from deserializer   
-requestid           	string              	from deserializer   
-operation           	string              	from deserializer   
-rkey                	string              	from deserializer   
-requesturi          	string              	from deserializer   
-httpstatus          	int                 	from deserializer   
-errorcode           	string              	from deserializer   
-bytessent           	int                 	from deserializer   
-objsize             	int                 	from deserializer   
-totaltime           	int                 	from deserializer   
-turnaroundtime      	int                 	from deserializer   
-referer             	string              	from deserializer   
-useragent           	string              	from deserializer   
+bucketowner	string	from deserializer
+bucketname	string	from deserializer
+rdatetime	string	from deserializer
+rip	string	from deserializer
+requester	string	from deserializer
+requestid	string	from deserializer
+operation	string	from deserializer
+rkey	string	from deserializer
+requesturi	string	from deserializer
+httpstatus	int	from deserializer
+errorcode	string	from deserializer
+bytessent	int	from deserializer
+objsize	int	from deserializer
+totaltime	int	from deserializer
+turnaroundtime	int	from deserializer
+referer	string	from deserializer
+useragent	string	from deserializer
 PREHOOK: query: LOAD DATA LOCAL INPATH '../contrib/data/files/s3.log' INTO TABLE s3log
 PREHOOK: type: LOAD
 POSTHOOK: query: LOAD DATA LOCAL INPATH '../contrib/data/files/s3.log' INTO TABLE s3log
@@ -42,11 +40,11 @@ POSTHOOK: Output: default@s3log
 PREHOOK: query: SELECT a.* FROM s3log a
 PREHOOK: type: QUERY
 PREHOOK: Input: default@s3log
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-17_01-52-54_413_2340174263889776647/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-12_03-29-52_645_7274865857229017916/-mr-10000
 POSTHOOK: query: SELECT a.* FROM s3log a
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@s3log
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-17_01-52-54_413_2340174263889776647/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-12_03-29-52_645_7274865857229017916/-mr-10000
 04ff331638adc13885d6c42059584deabbdeabcd55bf0bee491172a79a87b196	img.zemanta.com	09/Apr/2009:22:00:01 +0000	212.143.99.188	65a011a29cdf8ec533ec3d1ccaae921c	D987234E52141DE7	REST.GET.OBJECT	pixy.gif	GET /pixy.gif?x-id=4560525f-2864-495c-842c-159ede7143f8 HTTP/1.1	200	-	828	828	3	2	http://www.mediafuturist.com/2009/02/marc-andreessen-on-the-charlie-rose-show-talking-about-mobile-technology-video.html	Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.19 (KHTML, like Gecko) Chrome/1.0.154.53 Safari/525.19
 04ff331638adc13885d6c42059584deabbdeabcd55bf0bee491172a79a87b196	img.zemanta.com	09/Apr/2009:22:00:01 +0000	74.244.182.35	65a011a29cdf8ec533ec3d1ccaae921c	626EECA20AB12A5C	REST.GET.OBJECT	pixy.gif	GET /pixy.gif HTTP/1.1	200	-	828	828	20	20	http://trueslant.com/	Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_6; en-us) AppleWebKit/528.16 (KHTML, like Gecko) Version/4 Public Beta Safari/528.16
 04ff331638adc13885d6c42059584deabbdeabcd55bf0bee491172a79a87b196	img.zemanta.com	09/Apr/2009:22:00:02 +0000	62.149.175.120	65a011a29cdf8ec533ec3d1ccaae921c	3E93D70E69292C98	REST.GET.OBJECT	pixy.gif	GET /pixy.gif?x-id=9fec752e-2318-4da3-864e-ac5b9e47c4ae HTTP/1.0	200	-	828	828	4	3	-	-

Modified: hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes.q.out?rev=1021992&r1=1021991&r2=1021992&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes.q.out (original)
+++ hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes.q.out Wed Oct 13 04:26:34 2010
@@ -34,10 +34,11 @@ ABSTRACT SYNTAX TREE:
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
-  Stage-4 depends on stages: Stage-1 , consists of Stage-3, Stage-2
+  Stage-5 depends on stages: Stage-1 , consists of Stage-4, Stage-3
+  Stage-4
+  Stage-0 depends on stages: Stage-4, Stage-3
+  Stage-2 depends on stages: Stage-0
   Stage-3
-  Stage-0 depends on stages: Stage-3, Stage-2
-  Stage-2
 
 STAGE PLANS:
   Stage: Stage-1
@@ -81,14 +82,14 @@ STAGE PLANS:
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: dest1
 
-  Stage: Stage-4
+  Stage: Stage-5
     Conditional Operator
 
-  Stage: Stage-3
+  Stage: Stage-4
     Move Operator
       files:
           hdfs directory: true
-          destination: pfile:/data/users/njain/hive1/hive1/build/contrib/scratchdir/hive_2010-08-23_13-24-05_576_565846532461197178/-ext-10000
+          destination: pfile:/home/thiruvel/projects/hive/hive.unsecure/build/contrib/scratchdir/hive_2010-10-12_03-31-17_495_4744449209030401685/-ext-10000
 
   Stage: Stage-0
     Move Operator
@@ -101,9 +102,12 @@ STAGE PLANS:
               name: dest1
 
   Stage: Stage-2
+    Stats-Aggr Operator
+
+  Stage: Stage-3
     Map Reduce
       Alias -> Map Operator Tree:
-        pfile:/data/users/njain/hive1/hive1/build/contrib/scratchdir/hive_2010-08-23_13-24-05_576_565846532461197178/-ext-10002 
+        pfile:/home/thiruvel/projects/hive/hive.unsecure/build/contrib/scratchdir/hive_2010-10-12_03-31-17_495_4744449209030401685/-ext-10002 
             File Output Operator
               compressed: false
               GlobalTableId: 0
@@ -143,11 +147,11 @@ POSTHOOK: Lineage: dest1.value SCRIPT [(
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-23_13-24-09_661_6867439153629672720/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-12_03-31-21_234_2785992426371159165/-mr-10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-23_13-24-09_661_6867439153629672720/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-12_03-31-21_234_2785992426371159165/-mr-10000
 POSTHOOK: Lineage: dest1.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: dest1.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 238	val_238

Modified: hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes2.q.out?rev=1021992&r1=1021991&r2=1021992&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes2.q.out (original)
+++ hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes2.q.out Wed Oct 13 04:26:34 2010
@@ -34,10 +34,11 @@ ABSTRACT SYNTAX TREE:
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
-  Stage-4 depends on stages: Stage-1 , consists of Stage-3, Stage-2
+  Stage-5 depends on stages: Stage-1 , consists of Stage-4, Stage-3
+  Stage-4
+  Stage-0 depends on stages: Stage-4, Stage-3
+  Stage-2 depends on stages: Stage-0
   Stage-3
-  Stage-0 depends on stages: Stage-3, Stage-2
-  Stage-2
 
 STAGE PLANS:
   Stage: Stage-1
@@ -74,14 +75,14 @@ STAGE PLANS:
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: dest1
 
-  Stage: Stage-4
+  Stage: Stage-5
     Conditional Operator
 
-  Stage: Stage-3
+  Stage: Stage-4
     Move Operator
       files:
           hdfs directory: true
-          destination: pfile:/data/users/njain/hive1/hive1/build/contrib/scratchdir/hive_2010-08-23_13-24-10_570_2176265708028894787/-ext-10000
+          destination: pfile:/home/thiruvel/projects/hive/hive.unsecure/build/contrib/scratchdir/hive_2010-10-12_03-31-53_353_7823608103941051898/-ext-10000
 
   Stage: Stage-0
     Move Operator
@@ -94,9 +95,12 @@ STAGE PLANS:
               name: dest1
 
   Stage: Stage-2
+    Stats-Aggr Operator
+
+  Stage: Stage-3
     Map Reduce
       Alias -> Map Operator Tree:
-        pfile:/data/users/njain/hive1/hive1/build/contrib/scratchdir/hive_2010-08-23_13-24-10_570_2176265708028894787/-ext-10002 
+        pfile:/home/thiruvel/projects/hive/hive.unsecure/build/contrib/scratchdir/hive_2010-10-12_03-31-53_353_7823608103941051898/-ext-10002 
             File Output Operator
               compressed: false
               GlobalTableId: 0
@@ -136,11 +140,11 @@ POSTHOOK: Lineage: dest1.value SCRIPT [(
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-23_13-24-14_903_5542423827591705393/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-12_03-31-57_143_8244293612021781232/-mr-10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-23_13-24-14_903_5542423827591705393/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-12_03-31-57_143_8244293612021781232/-mr-10000
 POSTHOOK: Lineage: dest1.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: dest1.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 238	val_238

Modified: hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes3.q.out?rev=1021992&r1=1021991&r2=1021992&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes3.q.out (original)
+++ hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes3.q.out Wed Oct 13 04:26:34 2010
@@ -34,10 +34,11 @@ ABSTRACT SYNTAX TREE:
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
-  Stage-4 depends on stages: Stage-1 , consists of Stage-3, Stage-2
+  Stage-5 depends on stages: Stage-1 , consists of Stage-4, Stage-3
+  Stage-4
+  Stage-0 depends on stages: Stage-4, Stage-3
+  Stage-2 depends on stages: Stage-0
   Stage-3
-  Stage-0 depends on stages: Stage-3, Stage-2
-  Stage-2
 
 STAGE PLANS:
   Stage: Stage-1
@@ -74,14 +75,14 @@ STAGE PLANS:
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: dest1
 
-  Stage: Stage-4
+  Stage: Stage-5
     Conditional Operator
 
-  Stage: Stage-3
+  Stage: Stage-4
     Move Operator
       files:
           hdfs directory: true
-          destination: pfile:/data/users/njain/hive1/hive1/build/contrib/scratchdir/hive_2010-08-23_13-24-15_729_7647225505062209828/-ext-10000
+          destination: pfile:/home/thiruvel/projects/hive/hive.unsecure/build/contrib/scratchdir/hive_2010-10-12_03-32-28_617_3175042933764903801/-ext-10000
 
   Stage: Stage-0
     Move Operator
@@ -94,9 +95,12 @@ STAGE PLANS:
               name: dest1
 
   Stage: Stage-2
+    Stats-Aggr Operator
+
+  Stage: Stage-3
     Map Reduce
       Alias -> Map Operator Tree:
-        pfile:/data/users/njain/hive1/hive1/build/contrib/scratchdir/hive_2010-08-23_13-24-15_729_7647225505062209828/-ext-10002 
+        pfile:/home/thiruvel/projects/hive/hive.unsecure/build/contrib/scratchdir/hive_2010-10-12_03-32-28_617_3175042933764903801/-ext-10002 
             File Output Operator
               compressed: false
               GlobalTableId: 0
@@ -136,11 +140,11 @@ POSTHOOK: Lineage: dest1.value SCRIPT [(
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-23_13-24-19_894_8743234042285819656/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-12_03-32-32_283_6994704300925389180/-mr-10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-23_13-24-19_894_8743234042285819656/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-12_03-32-32_283_6994704300925389180/-mr-10000
 POSTHOOK: Lineage: dest1.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: dest1.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 238	val_238

Modified: hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out?rev=1021992&r1=1021991&r2=1021992&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out (original)
+++ hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_typedbytes5.q.out Wed Oct 13 04:26:34 2010
@@ -34,10 +34,11 @@ ABSTRACT SYNTAX TREE:
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
-  Stage-4 depends on stages: Stage-1 , consists of Stage-3, Stage-2
+  Stage-5 depends on stages: Stage-1 , consists of Stage-4, Stage-3
+  Stage-4
+  Stage-0 depends on stages: Stage-4, Stage-3
+  Stage-2 depends on stages: Stage-0
   Stage-3
-  Stage-0 depends on stages: Stage-3, Stage-2
-  Stage-2
 
 STAGE PLANS:
   Stage: Stage-1
@@ -81,14 +82,14 @@ STAGE PLANS:
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: dest1
 
-  Stage: Stage-4
+  Stage: Stage-5
     Conditional Operator
 
-  Stage: Stage-3
+  Stage: Stage-4
     Move Operator
       files:
           hdfs directory: true
-          destination: pfile:/data/users/njain/hive1/hive1/build/contrib/scratchdir/hive_2010-08-23_13-24-25_534_5633207757225095838/-ext-10000
+          destination: pfile:/home/thiruvel/projects/hive/hive.unsecure/build/contrib/scratchdir/hive_2010-10-12_03-33-03_025_994991423817228026/-ext-10000
 
   Stage: Stage-0
     Move Operator
@@ -101,9 +102,12 @@ STAGE PLANS:
               name: dest1
 
   Stage: Stage-2
+    Stats-Aggr Operator
+
+  Stage: Stage-3
     Map Reduce
       Alias -> Map Operator Tree:
-        pfile:/data/users/njain/hive1/hive1/build/contrib/scratchdir/hive_2010-08-23_13-24-25_534_5633207757225095838/-ext-10002 
+        pfile:/home/thiruvel/projects/hive/hive.unsecure/build/contrib/scratchdir/hive_2010-10-12_03-33-03_025_994991423817228026/-ext-10002 
             File Output Operator
               compressed: false
               GlobalTableId: 0
@@ -143,11 +147,11 @@ POSTHOOK: Lineage: dest1.value SCRIPT [(
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-23_13-24-29_338_1419211699196835493/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-12_03-33-06_714_8419419762175928270/-mr-10000
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-23_13-24-29_338_1419211699196835493/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-12_03-33-06_714_8419419762175928270/-mr-10000
 POSTHOOK: Lineage: dest1.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: dest1.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 238	val_238

Modified: hadoop/hive/trunk/hbase-handler/src/test/queries/hbase_stats.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/hbase-handler/src/test/queries/hbase_stats.q?rev=1021992&r1=1021991&r2=1021992&view=diff
==============================================================================
--- hadoop/hive/trunk/hbase-handler/src/test/queries/hbase_stats.q (original)
+++ hadoop/hive/trunk/hbase-handler/src/test/queries/hbase_stats.q Wed Oct 13 04:26:34 2010
@@ -1,23 +1,21 @@
 set datanucleus.cache.collections=false;
 
 set hive.stats.dbclass=hbase;
-analyze table src compute statistics;
 
-desc extended src;
-
-analyze table srcpart partition(ds='2008-04-08', hr=11) compute statistics;
-analyze table srcpart partition(ds='2008-04-08', hr=12) compute statistics;
-
-desc extended srcpart partition(ds='2008-04-08', hr=11);
-desc extended srcpart partition(ds='2008-04-08', hr=12);
-desc extended srcpart;
+create table stats_src like src;
+insert overwrite table stats_src select * from src;
+analyze table stats_src compute statistics;
+desc formatted stats_src;
 
 create table hbase_part like srcpart;
 
 insert overwrite table hbase_part partition (ds='2010-04-08', hr = '11') select key, value from src;
 insert overwrite table hbase_part partition (ds='2010-04-08', hr = '12') select key, value from src;
 
-desc extended hbase_part;
-desc extended hbase_part partition (ds='2010-04-08', hr = '11');
-desc extended hbase_part partition (ds='2010-04-08', hr = '12');
+analyze table hbase_part partition(ds='2008-04-08', hr=11) compute statistics;
+analyze table hbase_part partition(ds='2008-04-08', hr=12) compute statistics;
+
+desc formatted hbase_part;
+desc formatted hbase_part partition (ds='2010-04-08', hr = '11');
+desc formatted hbase_part partition (ds='2010-04-08', hr = '12');
 

Modified: hadoop/hive/trunk/hbase-handler/src/test/results/hbase_queries.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/hbase-handler/src/test/results/hbase_queries.q.out?rev=1021992&r1=1021991&r2=1021992&view=diff
==============================================================================
--- hadoop/hive/trunk/hbase-handler/src/test/results/hbase_queries.q.out (original)
+++ hadoop/hive/trunk/hbase-handler/src/test/results/hbase_queries.q.out Wed Oct 13 04:26:34 2010
@@ -17,45 +17,18 @@ PREHOOK: query: DESCRIBE EXTENDED hbase_
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: DESCRIBE EXTENDED hbase_table_1
 POSTHOOK: type: DESCTABLE
-col_name            	data_type           	comment             
-	 	 
-key                 	int                 	from deserializer   
-value               	string              	from deserializer   
-	 	 
-# Detailed Table Information	 	 
-Database:           	default             	 
-Owner:              	thiruvel            	 
-CreateTime:         	Sun Sep 19 23:24:01 PDT 2010	 
-LastAccessTime:     	Wed Dec 31 16:00:00 PST 1969	 
-Protect Mode:       	None                	 
-Retention:          	0                   	 
-Location:           	pfile:/home/thiruvel/projects/hive/hive.unsecure/build/hbase-handler/test/data/warehouse/hbase_table_1	 
-Table Type:         	MANAGED_TABLE       	 
-Table Parameters:	 	 
-	hbase.table.name    	hbase_table_0       
-	transient_lastDdlTime	1284963841          
-	storage_handler     	org.apache.hadoop.hive.hbase.HBaseStorageHandler
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.hbase.HBaseSerDe	 
-InputFormat:        	org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-	hbase.columns.mapping	cf:string           
+key	int	from deserializer
+value	string	from deserializer
 	 	 
+Detailed Table Information	Table(tableName:hbase_table_1, dbName:default, owner:thiruvel, createTime:1286801985, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null), FieldSchema(name:value, type:string, comment:null)], location:pfile:/home/thiruvel/projects/hive/hive.unsecure/build/hbase-handler/test/data/warehouse/hbase_table_1, inputFormat:org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat, outputFormat:org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.hbase.HBaseSerDe, parameters:{serialization.format=1, hbase.columns.mapping=cf:string}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{hbase.table.name=hbase_table_0, transient_lastDdlTime=1286801985, storage_handler=org.apache.hadoop.hive.hbase.HBaseStorageHandler}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TAB
 LE)	
 PREHOOK: query: select * from hbase_table_1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_1
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-01_683_4327611959875667509/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-59-45_101_8453038815095274852/-mr-10000
 POSTHOOK: query: select * from hbase_table_1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_1
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-01_683_4327611959875667509/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-59-45_101_8453038815095274852/-mr-10000
 PREHOOK: query: EXPLAIN FROM src INSERT OVERWRITE TABLE hbase_table_1 SELECT * WHERE (key%2)=0
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN FROM src INSERT OVERWRITE TABLE hbase_table_1 SELECT * WHERE (key%2)=0
@@ -225,7 +198,7 @@ STAGE PLANS:
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/tmp/thiruvel/hive_2010-09-19_23-24-05_761_4987544134453195852/-mr-10002 
+        file:/tmp/thiruvel/hive_2010-10-11_05-59-48_123_448732892425215668/-mr-10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -264,7 +237,7 @@ ORDER BY key, value LIMIT 20
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_1
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-05_861_535022589520098382/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-59-48_231_7394901214808727992/-mr-10000
 POSTHOOK: query: SELECT Y.* 
 FROM 
 (SELECT hbase_table_1.* FROM hbase_table_1) x
@@ -275,7 +248,7 @@ ORDER BY key, value LIMIT 20
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_1
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-05_861_535022589520098382/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-59-48_231_7394901214808727992/-mr-10000
 0	val_0
 0	val_0
 0	val_0
@@ -409,7 +382,7 @@ STAGE PLANS:
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/tmp/thiruvel/hive_2010-09-19_23-24-12_397_6864973420568576681/-mr-10002 
+        file:/tmp/thiruvel/hive_2010-10-11_05-59-53_782_8186257647559313852/-mr-10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -447,7 +420,7 @@ ORDER BY key,value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_1
 PREHOOK: Input: default@hbase_table_2
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-12_547_6310920357243853243/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-59-53_934_5093221478605005777/-mr-10000
 POSTHOOK: query: SELECT Y.*
 FROM 
 (SELECT hbase_table_1.* FROM hbase_table_1 WHERE hbase_table_1.key > 100) x
@@ -458,7 +431,7 @@ ORDER BY key,value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_1
 POSTHOOK: Input: default@hbase_table_2
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-12_547_6310920357243853243/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_05-59-53_934_5093221478605005777/-mr-10000
 104	val_104
 114	val_114
 116	val_116
@@ -489,48 +462,48 @@ PREHOOK: query: select * from (select co
 PREHOOK: type: QUERY
 PREHOOK: Input: default@empty_hbase_table
 PREHOOK: Input: default@empty_normal_table
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-21_501_1398129784538806928/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-00-01_523_3848783679253772988/-mr-10000
 POSTHOOK: query: select * from (select count(1) as c from empty_normal_table union all select count(1) as c from empty_hbase_table) x order by c
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@empty_hbase_table
 POSTHOOK: Input: default@empty_normal_table
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-21_501_1398129784538806928/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-00-01_523_3848783679253772988/-mr-10000
 0
 0
 PREHOOK: query: select * from (select count(1) c from empty_normal_table union all select count(1) as c from hbase_table_1) x order by c
 PREHOOK: type: QUERY
 PREHOOK: Input: default@empty_normal_table
 PREHOOK: Input: default@hbase_table_1
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-29_194_1717741306846244710/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-00-08_760_1058172246489803138/-mr-10000
 POSTHOOK: query: select * from (select count(1) c from empty_normal_table union all select count(1) as c from hbase_table_1) x order by c
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@empty_normal_table
 POSTHOOK: Input: default@hbase_table_1
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-29_194_1717741306846244710/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-00-08_760_1058172246489803138/-mr-10000
 0
 155
 PREHOOK: query: select * from (select count(1) c from src union all select count(1) as c from empty_hbase_table) x order by c
 PREHOOK: type: QUERY
 PREHOOK: Input: default@empty_hbase_table
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-36_929_1877869894302110012/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-00-16_354_6518537021672889680/-mr-10000
 POSTHOOK: query: select * from (select count(1) c from src union all select count(1) as c from empty_hbase_table) x order by c
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@empty_hbase_table
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-36_929_1877869894302110012/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-00-16_354_6518537021672889680/-mr-10000
 0
 500
 PREHOOK: query: select * from (select count(1) c from src union all select count(1) as c from hbase_table_1) x order by c
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_1
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-44_477_2750297097097131407/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-00-24_161_1740152107709241029/-mr-10000
 POSTHOOK: query: select * from (select count(1) c from src union all select count(1) as c from hbase_table_1) x order by c
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_1
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-24-44_477_2750297097097131407/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-00-24_161_1740152107709241029/-mr-10000
 155
 500
 PREHOOK: query: CREATE TABLE hbase_table_3(key int, value string, count int) 
@@ -730,20 +703,20 @@ POSTHOOK: Output: default@hbase_table_3
 PREHOOK: query: select count(1) from hbase_table_3
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_3
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-01_253_8576430709310957378/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-00-39_459_7247172747255833113/-mr-10000
 POSTHOOK: query: select count(1) from hbase_table_3
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_3
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-01_253_8576430709310957378/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-00-39_459_7247172747255833113/-mr-10000
 155
 PREHOOK: query: select * from hbase_table_3 order by key, value limit 5
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_3
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-04_391_6395645615304876336/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-00-42_545_314845276079186727/-mr-10000
 POSTHOOK: query: select * from hbase_table_3 order by key, value limit 5
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_3
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-04_391_6395645615304876336/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-00-42_545_314845276079186727/-mr-10000
 0	val_0	3
 2	val_2	1
 4	val_4	1
@@ -752,11 +725,11 @@ POSTHOOK: Output: file:/tmp/thiruvel/hiv
 PREHOOK: query: select key, count from hbase_table_3 order by key, count desc limit 5
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_3
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-07_505_7507809148698442371/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-00-45_460_6761948063754904632/-mr-10000
 POSTHOOK: query: select key, count from hbase_table_3 order by key, count desc limit 5
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_3
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-07_505_7507809148698442371/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-00-45_460_6761948063754904632/-mr-10000
 0	3
 2	1
 4	1
@@ -792,11 +765,11 @@ POSTHOOK: Output: default@hbase_table_4
 PREHOOK: query: SELECT * FROM hbase_table_4 ORDER BY key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_4
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-16_096_5831356591320350016/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-00-53_157_4325075516529075140/-mr-10000
 POSTHOOK: query: SELECT * FROM hbase_table_4 ORDER BY key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_4
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-16_096_5831356591320350016/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-00-53_157_4325075516529075140/-mr-10000
 98	val_98	99	100
 100	val_100	101	102
 PREHOOK: query: DROP TABLE hbase_table_5
@@ -817,11 +790,11 @@ POSTHOOK: Output: default@hbase_table_5
 PREHOOK: query: SELECT * FROM hbase_table_5 ORDER BY key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_5
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-19_335_8952484238655135527/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-00-56_459_1940033772917388905/-mr-10000
 POSTHOOK: query: SELECT * FROM hbase_table_5 ORDER BY key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_5
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-19_335_8952484238655135527/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-00-56_459_1940033772917388905/-mr-10000
 98	{"b":"val_98","c":"99"}
 100	{"b":"val_100","c":"101"}
 PREHOOK: query: DROP TABLE hbase_table_6
@@ -854,11 +827,11 @@ POSTHOOK: Output: default@hbase_table_6
 PREHOOK: query: SELECT * FROM hbase_table_6 ORDER BY key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_6
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-28_089_7578738986294123292/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-04-13_635_4642522102298920586/-mr-10000
 POSTHOOK: query: SELECT * FROM hbase_table_6 ORDER BY key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_6
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-28_089_7578738986294123292/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-04-13_635_4642522102298920586/-mr-10000
 98	{"val_98":"98"}
 100	{"val_100":"100"}
 PREHOOK: query: DROP TABLE hbase_table_7
@@ -893,11 +866,11 @@ POSTHOOK: Output: default@hbase_table_7
 PREHOOK: query: SELECT * FROM hbase_table_7 ORDER BY key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_7
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-36_674_7610811991249617421/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-04-21_212_9134808187739697779/-mr-10000
 POSTHOOK: query: SELECT * FROM hbase_table_7 ORDER BY key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_7
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-36_674_7610811991249617421/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-04-21_212_9134808187739697779/-mr-10000
 {"VAL_98":"99.0","val_98":"98"}	98
 {"VAL_100":"101.0","val_100":"100"}	100
 PREHOOK: query: DROP TABLE hbase_table_8
@@ -930,11 +903,11 @@ POSTHOOK: Output: default@hbase_table_8
 PREHOOK: query: SELECT * FROM hbase_table_8 ORDER BY key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table_8
-PREHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-45_283_2749724644026569867/-mr-10000
+PREHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-04-28_863_5629781435508376836/-mr-10000
 POSTHOOK: query: SELECT * FROM hbase_table_8 ORDER BY key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table_8
-POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-09-19_23-25-45_283_2749724644026569867/-mr-10000
+POSTHOOK: Output: file:/tmp/thiruvel/hive_2010-10-11_06-04-28_863_5629781435508376836/-mr-10000
 98	val_98	99	100
 100	val_100	101	102
 PREHOOK: query: DROP TABLE hbase_table_1

Modified: hadoop/hive/trunk/hbase-handler/src/test/results/hbase_stats.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/hbase-handler/src/test/results/hbase_stats.q.out?rev=1021992&r1=1021991&r2=1021992&view=diff
==============================================================================
--- hadoop/hive/trunk/hbase-handler/src/test/results/hbase_stats.q.out (original)
+++ hadoop/hive/trunk/hbase-handler/src/test/results/hbase_stats.q.out Wed Oct 13 04:26:34 2010
@@ -1,69 +1,73 @@
-PREHOOK: query: analyze table src compute statistics
-PREHOOK: type: null
+PREHOOK: query: create table stats_src like src
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table stats_src like src
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@stats_src
+PREHOOK: query: insert overwrite table stats_src select * from src
+PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-POSTHOOK: query: analyze table src compute statistics
-POSTHOOK: type: null
+PREHOOK: Output: default@stats_src
+POSTHOOK: query: insert overwrite table stats_src select * from src
+POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: default@src
-PREHOOK: query: desc extended src
-PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc extended src
-POSTHOOK: type: DESCTABLE
-key	string	default
-value	string	default
-	 	 
-Detailed Table Information	Table(tableName:src, dbName:default, owner:null, createTime:1284419348, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/nzhang/work/784/apache-hive/build/hbase-handler/test/data/warehouse/src, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{numPartitions=0, numFiles=1, transient_lastDdlTime=1284419360, numRows=500, totalSize=5812}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
-PREHOOK: query: analyze table srcpart partition(ds='2008-04-08', hr=11) compute statistics
-PREHOOK: type: null
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: query: analyze table srcpart partition(ds='2008-04-08', hr=11) compute statistics
-POSTHOOK: type: null
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Output: default@srcpart
-POSTHOOK: Output: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: query: analyze table srcpart partition(ds='2008-04-08', hr=12) compute statistics
+POSTHOOK: Output: default@stats_src
+POSTHOOK: Lineage: stats_src.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: analyze table stats_src compute statistics
 PREHOOK: type: null
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: query: analyze table srcpart partition(ds='2008-04-08', hr=12) compute statistics
+PREHOOK: Input: default@stats_src
+PREHOOK: Output: default@stats_src
+POSTHOOK: query: analyze table stats_src compute statistics
 POSTHOOK: type: null
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@srcpart
-POSTHOOK: Output: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: query: desc extended srcpart partition(ds='2008-04-08', hr=11)
+POSTHOOK: Input: default@stats_src
+POSTHOOK: Output: default@stats_src
+POSTHOOK: Lineage: stats_src.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: desc formatted stats_src
 PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc extended srcpart partition(ds='2008-04-08', hr=11)
+POSTHOOK: query: desc formatted stats_src
 POSTHOOK: type: DESCTABLE
-key	string	default
-value	string	default
-ds	string	
-hr	string	
+POSTHOOK: Lineage: stats_src.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+# col_name            	data_type           	comment             
 	 	 
-Detailed Partition Information	Partition(values:[2008-04-08, 11], dbName:default, tableName:srcpart, createTime:1284419340, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/nzhang/work/784/apache-hive/build/hbase-handler/test/data/warehouse/srcpart/ds=2008-04-08/hr=11, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1284419365, numRows=500, totalSize=5812})	
-PREHOOK: query: desc extended srcpart partition(ds='2008-04-08', hr=12)
-PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc extended srcpart partition(ds='2008-04-08', hr=12)
-POSTHOOK: type: DESCTABLE
-key	string	default
-value	string	default
-ds	string	
-hr	string	
+key                 	string              	default             
+value               	string              	default             
 	 	 
-Detailed Partition Information	Partition(values:[2008-04-08, 12], dbName:default, tableName:srcpart, createTime:1284419342, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/nzhang/work/784/apache-hive/build/hbase-handler/test/data/warehouse/srcpart/ds=2008-04-08/hr=12, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1284419371, numRows=500, totalSize=5812})	
-PREHOOK: query: desc extended srcpart
-PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc extended srcpart
-POSTHOOK: type: DESCTABLE
-key	string	default
-value	string	default
-ds	string	
-hr	string	
+# Detailed Table Information	 	 
+Database:           	default             	 
+Owner:              	null                	 
+CreateTime:         	Tue Oct 12 15:19:12 PDT 2010	 
+LastAccessTime:     	UNKNOWN             	 
+Protect Mode:       	None                	 
+Retention:          	0                   	 
+Location:           	pfile:/home/thiruvel/projects/hive/hive.test.build/build/hbase-handler/test/data/warehouse/stats_src	 
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	EXTERNAL            	FALSE               
+	numFiles            	1                   
+	numPartitions       	0                   
+	numRows             	500                 
+	totalSize           	5812                
+	transient_lastDdlTime	1286921961          
 	 	 
-Detailed Table Information	Table(tableName:srcpart, dbName:default, owner:null, createTime:1284419338, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/nzhang/work/784/apache-hive/build/hbase-handler/test/data/warehouse/srcpart, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{numPartitions=2, numFiles=2, transient_lastDdlTime=1284419371, numRows=1000, totalSize=11624}, viewOriginalText:null, viewExpandedText:null, tableT
 ype:MANAGED_TABLE)	
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
 PREHOOK: query: create table hbase_part like srcpart
 PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table hbase_part like srcpart
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@hbase_part
+POSTHOOK: Lineage: stats_src.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: insert overwrite table hbase_part partition (ds='2010-04-08', hr = '11') select key, value from src
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -74,6 +78,8 @@ POSTHOOK: Input: default@src
 POSTHOOK: Output: default@hbase_part@ds=2010-04-08/hr=11
 POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_src.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: insert overwrite table hbase_part partition (ds='2010-04-08', hr = '12') select key, value from src
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -86,45 +92,175 @@ POSTHOOK: Lineage: hbase_part PARTITION(
 POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=12).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=12).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: desc extended hbase_part
+POSTHOOK: Lineage: stats_src.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: analyze table hbase_part partition(ds='2008-04-08', hr=11) compute statistics
+PREHOOK: type: null
+PREHOOK: Input: default@hbase_part@ds=2010-04-08/hr=11
+PREHOOK: Input: default@hbase_part@ds=2010-04-08/hr=12
+PREHOOK: Output: default@hbase_part
+POSTHOOK: query: analyze table hbase_part partition(ds='2008-04-08', hr=11) compute statistics
+POSTHOOK: type: null
+POSTHOOK: Input: default@hbase_part@ds=2010-04-08/hr=11
+POSTHOOK: Input: default@hbase_part@ds=2010-04-08/hr=12
+POSTHOOK: Output: default@hbase_part
+POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=12).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=12).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_src.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: analyze table hbase_part partition(ds='2008-04-08', hr=12) compute statistics
+PREHOOK: type: null
+PREHOOK: Input: default@hbase_part@ds=2010-04-08/hr=11
+PREHOOK: Input: default@hbase_part@ds=2010-04-08/hr=12
+PREHOOK: Output: default@hbase_part
+POSTHOOK: query: analyze table hbase_part partition(ds='2008-04-08', hr=12) compute statistics
+POSTHOOK: type: null
+POSTHOOK: Input: default@hbase_part@ds=2010-04-08/hr=11
+POSTHOOK: Input: default@hbase_part@ds=2010-04-08/hr=12
+POSTHOOK: Output: default@hbase_part
+POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=12).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=12).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_src.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: desc formatted hbase_part
 PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc extended hbase_part
+POSTHOOK: query: desc formatted hbase_part
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=12).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=12).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-key	string	default
-value	string	default
-ds	string	
-hr	string	
+POSTHOOK: Lineage: stats_src.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	default             
+value               	string              	default             
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
 	 	 
-Detailed Table Information	Table(tableName:hbase_part, dbName:default, owner:null, createTime:1284419371, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/nzhang/work/784/apache-hive/build/hbase-handler/test/data/warehouse/hbase_part, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{numPartitions=4, EXTERNAL=FALSE, numFiles=4, transient_lastDdlTime=1284419382, numRows=2000, totalSize=23248}, viewOriginalText:null, viewExp
 andedText:null, tableType:MANAGED_TABLE)	
-PREHOOK: query: desc extended hbase_part partition (ds='2010-04-08', hr = '11')
+ds                  	string              	None                
+hr                  	string              	None                
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+Owner:              	null                	 
+CreateTime:         	Tue Oct 12 15:19:22 PDT 2010	 
+LastAccessTime:     	UNKNOWN             	 
+Protect Mode:       	None                	 
+Retention:          	0                   	 
+Location:           	pfile:/home/thiruvel/projects/hive/hive.test.build/build/hbase-handler/test/data/warehouse/hbase_part	 
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	EXTERNAL            	FALSE               
+	numFiles            	2                   
+	numPartitions       	2                   
+	numRows             	1000                
+	totalSize           	11624               
+	transient_lastDdlTime	1286921975          
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: desc formatted hbase_part partition (ds='2010-04-08', hr = '11')
 PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc extended hbase_part partition (ds='2010-04-08', hr = '11')
+POSTHOOK: query: desc formatted hbase_part partition (ds='2010-04-08', hr = '11')
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=12).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=12).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-key	string	default
-value	string	default
-ds	string	
-hr	string	
+POSTHOOK: Lineage: stats_src.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	default             
+value               	string              	default             
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	None                
+hr                  	string              	None                
 	 	 
-Detailed Partition Information	Partition(values:[2010-04-08, 11], dbName:default, tableName:hbase_part, createTime:1284419377, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/nzhang/work/784/apache-hive/build/hbase-handler/test/data/warehouse/hbase_part/ds=2010-04-08/hr=11, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1284419377, numRows=500, totalSize=5812})	
-PREHOOK: query: desc extended hbase_part partition (ds='2010-04-08', hr = '12')
+# Detailed Partition Information	 	 
+Partition Value:    	[2010-04-08, 11]    	 
+Database:           	default             	 
+Table:              	hbase_part          	 
+CreateTime:         	Tue Oct 12 15:19:25 PDT 2010	 
+LastAccessTime:     	UNKNOWN             	 
+Protect Mode:       	None                	 
+Location:           	pfile:/home/thiruvel/projects/hive/hive.test.build/build/hbase-handler/test/data/warehouse/hbase_part/ds=2010-04-08/hr=11	 
+Partition Parameters:	 	 
+	numFiles            	1                   
+	numRows             	500                 
+	totalSize           	5812                
+	transient_lastDdlTime	1286921965          
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: desc formatted hbase_part partition (ds='2010-04-08', hr = '12')
 PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc extended hbase_part partition (ds='2010-04-08', hr = '12')
+POSTHOOK: query: desc formatted hbase_part partition (ds='2010-04-08', hr = '12')
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=12).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=12).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-key	string	default
-value	string	default
-ds	string	
-hr	string	
+POSTHOOK: Lineage: stats_src.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	default             
+value               	string              	default             
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	None                
+hr                  	string              	None                
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2010-04-08, 12]    	 
+Database:           	default             	 
+Table:              	hbase_part          	 
+CreateTime:         	Tue Oct 12 15:19:28 PDT 2010	 
+LastAccessTime:     	UNKNOWN             	 
+Protect Mode:       	None                	 
+Location:           	pfile:/home/thiruvel/projects/hive/hive.test.build/build/hbase-handler/test/data/warehouse/hbase_part/ds=2010-04-08/hr=12	 
+Partition Parameters:	 	 
+	numFiles            	1                   
+	numRows             	500                 
+	totalSize           	5812                
+	transient_lastDdlTime	1286921968          
 	 	 
-Detailed Partition Information	Partition(values:[2010-04-08, 12], dbName:default, tableName:hbase_part, createTime:1284419382, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:pfile:/data/users/nzhang/work/784/apache-hive/build/hbase-handler/test/data/warehouse/hbase_part/ds=2010-04-08/hr=12, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{numFiles=1, transient_lastDdlTime=1284419382, numRows=500, totalSize=5812})	
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   

Modified: hadoop/hive/trunk/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java?rev=1021992&r1=1021991&r2=1021992&view=diff
==============================================================================
--- hadoop/hive/trunk/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java (original)
+++ hadoop/hive/trunk/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java Wed Oct 13 04:26:34 2010
@@ -121,10 +121,10 @@ public class TestHWISessionManager exten
 
     ArrayList<ArrayList<String>> searchBlockRes = searchItem.getResultBucket();
 
-    String resLine = searchBlockRes.get(0).get(2);
+    String resLine = searchBlockRes.get(0).get(0);
     assertEquals(true, resLine.contains("key"));
     assertEquals(true, resLine.contains("int"));
-    String resLine2 = searchBlockRes.get(0).get(3);
+    String resLine2 = searchBlockRes.get(0).get(1);
     assertEquals(true, resLine2.contains("value"));
     assertEquals(true, resLine2.contains("string"));
 

Modified: hadoop/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java?rev=1021992&r1=1021991&r2=1021992&view=diff
==============================================================================
--- hadoop/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (original)
+++ hadoop/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java Wed Oct 13 04:26:34 2010
@@ -568,15 +568,13 @@ public class TestJdbcDriver extends Test
     ResultSet res = stmt.executeQuery("describe " + tableName);
 
     res.next();
-    res.next();
-    res.next();
-    assertEquals("Column name 'key' not found", "key", res.getString(1).trim());
+    assertEquals("Column name 'key' not found", "key", res.getString(1));
     assertEquals("Column type 'int' for column key not found", "int", res
-        .getString(2).trim());
+        .getString(2));
     res.next();
-    assertEquals("Column name 'value' not found", "value", res.getString(1).trim());
+    assertEquals("Column name 'value' not found", "value", res.getString(1));
     assertEquals("Column type 'string' for column key not found", "string", res
-        .getString(2).trim());
+        .getString(2));
 
     assertFalse("More results found than expected", res.next());
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1021992&r1=1021991&r2=1021992&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Wed Oct 13 04:26:34 2010
@@ -1613,26 +1613,52 @@ public class DDLTask extends Task<DDLWor
 			DataOutput outStream = fs.create(resFile);
 
       if (colPath.equals(tableName)) {
-				outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(tbl));
+        if (!descTbl.isFormatted()) {
+          List<FieldSchema> cols = tbl.getCols();
+          if (tableName.equals(colPath)) {
+            cols.addAll(tbl.getPartCols());
+          }
+          outStream.writeBytes(MetaDataFormatUtils.displayColsUnformatted(cols));
+        } else {
+          outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(tbl));
+        }
       } else {
-				List<FieldSchema> cols = null;
-        cols = Hive.getFieldsFromDeserializer(colPath, tbl.getDeserializer());
-				outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(cols));
+        List<FieldSchema> cols = Hive.getFieldsFromDeserializer(colPath, tbl.getDeserializer());
+        if (descTbl.isFormatted()) {
+          outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(cols));
+        } else {
+          outStream.writeBytes(MetaDataFormatUtils.displayColsUnformatted(cols));
+        }
       }
 
       if (tableName.equals(colPath)) {
+
+        if (descTbl.isFormatted()) {
+          if (part != null) {
+            outStream.writeBytes(MetaDataFormatUtils.getPartitionInformation(part));
+          } else {
+            outStream.writeBytes(MetaDataFormatUtils.getTableInformation(tbl));
+          }
+        }
+
         // if extended desc table then show the complete details of the table
         if (descTbl.isExt()) {
           // add empty line
           outStream.write(terminator);
           if (part != null) {
             // show partition information
-            outStream.writeBytes(MetaDataFormatUtils.getPartitionInformation(part));
+            outStream.writeBytes("Detailed Partition Information");
+            outStream.write(separator);
+            outStream.writeBytes(part.getTPartition().toString());
+            outStream.write(separator);
             // comment column is empty
             outStream.write(terminator);
           } else {
             // show table information
-            outStream.writeBytes(MetaDataFormatUtils.getTableInformation(tbl));
+            outStream.writeBytes("Detailed Table Information");
+            outStream.write(separator);
+            outStream.writeBytes(tbl.getTTable().toString());
+            outStream.write(separator);
             outStream.write(terminator);
           }
         }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java?rev=1021992&r1=1021991&r2=1021992&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java Wed Oct 13 04:26:34 2010
@@ -26,6 +26,8 @@ import org.apache.hadoop.hive.ql.plan.De
 import java.util.Date;
 import java.util.List;
 import java.util.Map;
+import java.util.Collections;
+import java.util.ArrayList;
 
 /**
  * This class provides methods to format table information.
@@ -59,6 +61,7 @@ public final class MetaDataFormatUtils {
   }
 
   private static void formatColumnsHeader(StringBuilder columnInformation) {
+    columnInformation.append("# "); // Easy for shell scripts to ignore
     formatOutput(getColumnsHeader(), columnInformation);
     columnInformation.append(LINE_DELIM);
   }
@@ -76,11 +79,27 @@ public final class MetaDataFormatUtils {
     }
   }
 
+  /**
+   * Displays columns unformatted, preserving the pre-existing plain
+   * DESCRIBE output for backward compatibility.
+   */
+  public static String displayColsUnformatted(List<FieldSchema> cols) {
+    StringBuilder colBuffer = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
+    for (FieldSchema col : cols) {
+      colBuffer.append(col.getName());
+      colBuffer.append(FIELD_DELIM);
+      colBuffer.append(col.getType());
+      colBuffer.append(FIELD_DELIM);
+      colBuffer.append(col.getComment() == null ? "" : col.getComment());
+      colBuffer.append(LINE_DELIM);
+    }
+    return colBuffer.toString();
+  }
+
   public static String getPartitionInformation(Partition part) {
     StringBuilder tableInfo = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
 
     // Table Metadata
-    tableInfo.append("# Detailed Partition Information").append(LINE_DELIM);
+    tableInfo.append(LINE_DELIM).append("# Detailed Partition Information").append(LINE_DELIM);
     getPartitionMetaDataInformation(tableInfo, part);
 
     // Storage information.
@@ -94,7 +113,7 @@ public final class MetaDataFormatUtils {
     StringBuilder tableInfo = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
 
     // Table Metadata
-    tableInfo.append("# Detailed Table Information").append(LINE_DELIM);
+    tableInfo.append(LINE_DELIM).append("# Detailed Table Information").append(LINE_DELIM);
     getTableMetaDataInformation(tableInfo, table);
 
     // Storage information.
@@ -168,10 +187,11 @@ public final class MetaDataFormatUtils {
   }
 
   private static void displayAllParameters(Map<String, String> params, StringBuilder tableInfo) {
-    for (Map.Entry<String, String> parameter: params.entrySet()) {
+    List<String> keys = new ArrayList<String>(params.keySet());
+    Collections.sort(keys);
+    for (String key : keys) {
       tableInfo.append(FIELD_DELIM); // Ensures all params are indented.
-      formatOutput(parameter.getKey(), StringEscapeUtils.escapeJava(parameter.getValue()),
-          tableInfo);
+      formatOutput(key, StringEscapeUtils.escapeJava(params.get(key)), tableInfo);
     }
   }
 
@@ -181,8 +201,11 @@ public final class MetaDataFormatUtils {
   }
 
   private static String formatDate(long timeInSeconds) {
-    Date date = new Date(timeInSeconds * 1000);
-    return date.toString();
+    if (timeInSeconds != 0) {
+      Date date = new Date(timeInSeconds * 1000);
+      return date.toString();
+    }
+    return "UNKNOWN";
   }
 
   private static void formatOutput(String[] fields, StringBuilder tableInfo) {

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1021992&r1=1021991&r2=1021992&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Wed Oct 13 04:26:34 2010
@@ -790,9 +790,12 @@ public class DDLSemanticAnalyzer extends
       partSpec = getPartSpec(partspec);
     }
 
-    boolean isExt = ast.getChildCount() > 1;
-    DescTableDesc descTblDesc = new DescTableDesc(ctx.getResFile(), tableName,
-        partSpec, isExt);
+    DescTableDesc descTblDesc = new DescTableDesc(ctx.getResFile(), tableName, partSpec);
+    if (ast.getChildCount() == 2) {
+      int descOptions = ast.getChild(1).getType();
+      descTblDesc.setFormatted(descOptions == HiveParser.KW_FORMATTED);
+      descTblDesc.setExt(descOptions == HiveParser.KW_EXTENDED);
+    }
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         descTblDesc), conf));
     setFetchTask(createFetchTask(DescTableDesc.getSchema()));

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g?rev=1021992&r1=1021991&r2=1021992&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g Wed Oct 13 04:26:34 2010
@@ -624,7 +624,7 @@ partTypeExpr
 descStatement
 @init { msgs.push("describe statement"); }
 @after { msgs.pop(); }
-    : (KW_DESCRIBE|KW_DESC) (isExtended=KW_EXTENDED)? (parttype=partTypeExpr) -> ^(TOK_DESCTABLE $parttype $isExtended?)
+    : (KW_DESCRIBE|KW_DESC) (descOptions=KW_FORMATTED|descOptions=KW_EXTENDED)? (parttype=partTypeExpr) -> ^(TOK_DESCTABLE $parttype $descOptions?)
     | (KW_DESCRIBE|KW_DESC) KW_FUNCTION KW_EXTENDED? (name=descFuncNames) -> ^(TOK_DESCFUNCTION $name KW_EXTENDED?)
     ;
     
@@ -1836,6 +1836,7 @@ KW_TEMPORARY: 'TEMPORARY';
 KW_FUNCTION: 'FUNCTION';
 KW_EXPLAIN: 'EXPLAIN';
 KW_EXTENDED: 'EXTENDED';
+KW_FORMATTED: 'FORMATTED';
 KW_SERDE: 'SERDE';
 KW_WITH: 'WITH';
 KW_DEFERRED: 'DEFERRED';

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java?rev=1021992&r1=1021991&r2=1021992&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java Wed Oct 13 04:26:34 2010
@@ -39,6 +39,7 @@ public class DescTableDesc extends DDLDe
   HashMap<String, String> partSpec;
   String resFile;
   boolean isExt;
+  boolean isFormatted;
   /**
    * table name for the result of describe table.
    */
@@ -52,14 +53,14 @@ public class DescTableDesc extends DDLDe
   }
 
   /**
-   * @param isExt
    * @param partSpec
    * @param resFile
    * @param tableName
    */
   public DescTableDesc(Path resFile, String tableName,
-      HashMap<String, String> partSpec, boolean isExt) {
-    this.isExt = isExt;
+      HashMap<String, String> partSpec) {
+    this.isExt = false;
+    this.isFormatted = false;
     this.partSpec = partSpec;
     this.resFile = resFile.toString();
     this.tableName = tableName;
@@ -88,6 +89,21 @@ public class DescTableDesc extends DDLDe
     this.isExt = isExt;
   }
 
+  /**
+   * @return the isFormatted
+   */
+  public boolean isFormatted() {
+    return isFormatted;
+  }
+
+  /**
+   * @param isFormat
+   *          the isFormatted value to set
+   */
+  public void setFormatted(boolean isFormat) {
+    this.isFormatted = isFormat;
+  }
+
   /**
    * @return the tableName
    */

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_view.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_view.q?rev=1021992&r1=1021991&r2=1021992&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_view.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_view.q Wed Oct 13 04:26:34 2010
@@ -40,13 +40,17 @@ SELECT * from view2 where key=18;
 SHOW TABLES 'view.*';
 DESCRIBE view1;
 DESCRIBE EXTENDED view1;
+DESCRIBE FORMATTED view1;
 DESCRIBE view2;
 DESCRIBE EXTENDED view2;
+DESCRIBE FORMATTED view2;
 DESCRIBE view3;
 DESCRIBE EXTENDED view3;
+DESCRIBE FORMATTED view3;
 
 ALTER VIEW view3 SET TBLPROPERTIES ("biggest" = "loser");
 DESCRIBE EXTENDED view3;
+DESCRIBE FORMATTED view3;
 
 CREATE TABLE table1 (key int);
 
@@ -110,6 +114,7 @@ CREATE VIEW view8(c) AS
 SELECT test_translate('abc', 'a', 'b')
 FROM table1;
 DESCRIBE EXTENDED view8;
+DESCRIBE FORMATTED view8;
 SELECT * FROM view8;
 
 -- test usage of a UDAF within a view
@@ -119,12 +124,14 @@ CREATE VIEW view9(m) AS
 SELECT test_max(length(value))
 FROM src;
 DESCRIBE EXTENDED view9;
+DESCRIBE FORMATTED view9;
 SELECT * FROM view9;
 
 -- test usage of a subselect within a view
 CREATE VIEW view10 AS
 SELECT slurp.* FROM (SELECT * FROM src WHERE key=86) slurp;
 DESCRIBE EXTENDED view10;
+DESCRIBE FORMATTED view10;
 SELECT * FROM view10;
 
 -- test usage of a UDTF within a view
@@ -134,12 +141,14 @@ CREATE VIEW view11 AS
 SELECT test_explode(array(1,2,3)) AS (boom)
 FROM table1;
 DESCRIBE EXTENDED view11;
+DESCRIBE FORMATTED view11;
 SELECT * FROM view11;
 
 -- test usage of LATERAL within a view
 CREATE VIEW view12 AS
 SELECT * FROM src LATERAL VIEW explode(array(1,2,3)) myTable AS myCol;
 DESCRIBE EXTENDED view12;
+DESCRIBE FORMATTED view12;
 SELECT * FROM view12
 ORDER BY key ASC, myCol ASC LIMIT 1;
 
@@ -152,6 +161,7 @@ CREATE VIEW view13 AS
 SELECT s.key
 FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 ON key) s;
 DESCRIBE EXTENDED view13;
+DESCRIBE FORMATTED view13;
 SELECT * FROM view13
 ORDER BY key LIMIT 12;
 
@@ -168,6 +178,7 @@ JOIN
       select s4.key as key, s4.value as value from src s4 where s4.key < 10) unionsrc2
 ON (unionsrc1.key = unionsrc2.key);
 DESCRIBE EXTENDED view14;
+DESCRIBE FORMATTED view14;
 SELECT * FROM view14
 ORDER BY k1;
 
@@ -177,6 +188,7 @@ SELECT key,COUNT(value) AS value_count
 FROM src
 GROUP BY key;
 DESCRIBE EXTENDED view15;
+DESCRIBE FORMATTED view15;
 SELECT * FROM view15
 ORDER BY value_count DESC, key
 LIMIT 10;
@@ -186,6 +198,7 @@ CREATE VIEW view16 AS
 SELECT DISTINCT value
 FROM src;
 DESCRIBE EXTENDED view16;
+DESCRIBE FORMATTED view16;
 SELECT * FROM view16
 ORDER BY value
 LIMIT 10;