Posted to commits@hive.apache.org by om...@apache.org on 2013/05/21 01:48:22 UTC

svn commit: r1484626 - in /hive/trunk: common/src/java/org/apache/hadoop/hive/conf/ conf/ data/conf/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientpositive/

Author: omalley
Date: Mon May 20 23:48:22 2013
New Revision: 1484626

URL: http://svn.apache.org/r1484626
Log:
HIVE-4475 Switch RCFile default to LazyBinaryColumnarSerDe. (Gunther Hagleitner
via omalley)

Modified:
    hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/trunk/conf/hive-default.xml.template
    hive/trunk/data/conf/hive-site.xml
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/trunk/ql/src/test/queries/clientpositive/rcfile_default_format.q
    hive/trunk/ql/src/test/results/clientpositive/rcfile_default_format.q.out

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1484626&r1=1484625&r2=1484626&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Mon May 20 23:48:22 2013
@@ -441,6 +441,10 @@ public class HiveConf extends Configurat
     HIVEQUERYRESULTFILEFORMAT("hive.query.result.fileformat", "TextFile"),
     HIVECHECKFILEFORMAT("hive.fileformat.check", true),
 
+    // default serde for rcfile
+    HIVEDEFAULTRCFILESERDE("hive.default.rcfile.serde", 
+                           "org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe"),
+
     //Location of Hive run time structured log file
     HIVEHISTORYFILELOC("hive.querylog.location", "/tmp/" + System.getProperty("user.name")),
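
For context, a minimal sketch (not part of the patch) of how the new ConfVars entry is read and, if needed, pinned back to the old behavior through HiveConf. The property name and enum constant come from the hunk above; the wrapper class is purely illustrative.

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

    // Illustrative only -- assumes hive-common on the classpath.
    public class RcfileSerdeDefaultCheck {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();

        // With this patch the default resolves to LazyBinaryColumnarSerDe.
        String serde = conf.getVar(ConfVars.HIVEDEFAULTRCFILESERDE);
        System.out.println("hive.default.rcfile.serde = " + serde);

        // Sites that want the pre-HIVE-4475 behavior can pin the old SerDe,
        // as the test config data/conf/hive-site.xml does further below.
        conf.setVar(ConfVars.HIVEDEFAULTRCFILESERDE,
            "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe");
      }
    }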
 

Modified: hive/trunk/conf/hive-default.xml.template
URL: http://svn.apache.org/viewvc/hive/trunk/conf/hive-default.xml.template?rev=1484626&r1=1484625&r2=1484626&view=diff
==============================================================================
--- hive/trunk/conf/hive-default.xml.template (original)
+++ hive/trunk/conf/hive-default.xml.template Mon May 20 23:48:22 2013
@@ -374,6 +374,12 @@
 </property>
 
 <property>
+  <name>hive.default.rcfile.serde</name>
+  <value>org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe</value>
+  <description>The default SerDe hive will use for the rcfile format</description>
+</property>
+
+<property>
   <name>hive.fileformat.check</name>
   <value>true</value>
   <description>Whether to check file format or not when loading data files</description>

Modified: hive/trunk/data/conf/hive-site.xml
URL: http://svn.apache.org/viewvc/hive/trunk/data/conf/hive-site.xml?rev=1484626&r1=1484625&r2=1484626&view=diff
==============================================================================
--- hive/trunk/data/conf/hive-site.xml (original)
+++ hive/trunk/data/conf/hive-site.xml Mon May 20 23:48:22 2013
@@ -188,4 +188,10 @@
   <description>The default input format, if it is not specified, the system assigns it. It is set to HiveInputFormat for hadoop versions 17, 18 and 19, whereas it is set to CombineHiveInputFormat for hadoop 20. The user can always overwrite it - if there is a bug in CombineHiveInputFormat, it can always be manually set to HiveInputFormat. </description>
 </property>
 
+<property>
+  <name>hive.default.rcfile.serde</name>
+  <value>org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe</value>
+  <description>The default SerDe hive will use for the rcfile format</description>
+</property>
+
 </configuration>

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=1484626&r1=1484625&r2=1484626&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Mon May 20 23:48:22 2013
@@ -64,7 +64,6 @@ import org.apache.hadoop.hive.ql.plan.Li
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
@@ -121,7 +120,6 @@ public abstract class BaseSemanticAnalyz
       .getName();
   protected static final String ORCFILE_SERDE = OrcSerde.class
       .getName();
-  protected static final String COLUMNAR_SERDE = ColumnarSerDe.class.getName();
 
   class RowFormatParams {
     String fieldDelim = null;
@@ -195,7 +193,7 @@ public abstract class BaseSemanticAnalyz
         inputFormat = RCFILE_INPUT;
         outputFormat = RCFILE_OUTPUT;
         if (shared.serde == null) {
-          shared.serde = COLUMNAR_SERDE;
+          shared.serde = conf.getVar(HiveConf.ConfVars.HIVEDEFAULTRCFILESERDE);
         }
         storageFormat = true;
         break;
@@ -231,7 +229,7 @@ public abstract class BaseSemanticAnalyz
         } else if ("RCFile".equalsIgnoreCase(conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT))) {
           inputFormat = RCFILE_INPUT;
           outputFormat = RCFILE_OUTPUT;
-          shared.serde = COLUMNAR_SERDE;
+          shared.serde = conf.getVar(HiveConf.ConfVars.HIVEDEFAULTRCFILESERDE);
         } else if ("ORC".equalsIgnoreCase(conf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT))) {
           inputFormat = ORCFILE_INPUT;
           outputFormat = ORCFILE_OUTPUT;
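
The hunks above replace the hard-coded COLUMNAR_SERDE constant with a lookup against the session configuration. A condensed sketch of the resulting resolution order follows; the class and method names are illustrative, not the actual analyzer code.

    import org.apache.hadoop.hive.conf.HiveConf;

    // Illustrative only -- shows the resolution order the patch establishes
    // for tables stored as RCFile: an explicit SERDE clause still wins,
    // otherwise the SerDe comes from hive.default.rcfile.serde.
    final class RcfileSerdeResolution {
      static String resolve(HiveConf conf, String explicitSerde) {
        if (explicitSerde != null) {
          return explicitSerde;           // user-specified ROW FORMAT SERDE
        }
        // Post-HIVE-4475 default: LazyBinaryColumnarSerDe, unless the site
        // configuration overrides the property.
        return conf.getVar(HiveConf.ConfVars.HIVEDEFAULTRCFILESERDE);
      }
    }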

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1484626&r1=1484625&r2=1484626&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Mon May 20 23:48:22 2013
@@ -1350,7 +1350,7 @@ public class DDLSemanticAnalyzer extends
     case HiveParser.TOK_TBLRCFILE:
       inputFormat = RCFILE_INPUT;
       outputFormat = RCFILE_OUTPUT;
-      serde = COLUMNAR_SERDE;
+      serde = conf.getVar(HiveConf.ConfVars.HIVEDEFAULTRCFILESERDE);
       break;
     case HiveParser.TOK_TBLORCFILE:
       inputFormat = ORCFILE_INPUT;

Modified: hive/trunk/ql/src/test/queries/clientpositive/rcfile_default_format.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/rcfile_default_format.q?rev=1484626&r1=1484625&r2=1484626&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/rcfile_default_format.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/rcfile_default_format.q Mon May 20 23:48:22 2013
@@ -1,19 +1,31 @@
 SET hive.default.fileformat = RCFile;
 
 CREATE TABLE rcfile_default_format (key STRING);
-DESCRIBE EXTENDED rcfile_default_format; 
+DESCRIBE FORMATTED rcfile_default_format; 
 
 CREATE TABLE rcfile_default_format_ctas AS SELECT key,value FROM src;
-DESCRIBE EXTENDED rcfile_default_format_ctas; 
+DESCRIBE FORMATTED rcfile_default_format_ctas; 
 
 CREATE TABLE rcfile_default_format_txtfile (key STRING) STORED AS TEXTFILE;
 INSERT OVERWRITE TABLE rcfile_default_format_txtfile SELECT key from src;
-DESCRIBE EXTENDED rcfile_default_format_txtfile; 
+DESCRIBE FORMATTED rcfile_default_format_txtfile; 
 
 SET hive.default.fileformat = TextFile;
 CREATE TABLE textfile_default_format_ctas AS SELECT key,value FROM rcfile_default_format_ctas;
-DESCRIBE EXTENDED textfile_default_format_ctas;
+DESCRIBE FORMATTED textfile_default_format_ctas;
 
+SET hive.default.fileformat = RCFile;
+SET hive.default.rcfile.serde = org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe;
+CREATE TABLE rcfile_default_format_ctas_default_serde AS SELECT key,value FROM rcfile_default_format_ctas;
+DESCRIBE FORMATTED rcfile_default_format_ctas_default_serde;
+
+CREATE TABLE rcfile_default_format_default_serde (key STRING);
+DESCRIBE FORMATTED rcfile_default_format_default_serde;
 
+SET hive.default.fileformat = TextFile;
+CREATE TABLE rcfile_ctas_default_serde STORED AS rcfile AS SELECT key,value FROM rcfile_default_format_ctas;
+DESCRIBE FORMATTED rcfile_ctas_default_serde;
 
+CREATE TABLE rcfile_default_serde (key STRING) STORED AS rcfile;
+DESCRIBE FORMATTED rcfile_default_serde;
 

Modified: hive/trunk/ql/src/test/results/clientpositive/rcfile_default_format.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/rcfile_default_format.q.out?rev=1484626&r1=1484625&r2=1484626&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/rcfile_default_format.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/rcfile_default_format.q.out Mon May 20 23:48:22 2013
@@ -3,13 +3,34 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: CREATE TABLE rcfile_default_format (key STRING)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@rcfile_default_format
-PREHOOK: query: DESCRIBE EXTENDED rcfile_default_format
+PREHOOK: query: DESCRIBE FORMATTED rcfile_default_format
 PREHOOK: type: DESCTABLE
-POSTHOOK: query: DESCRIBE EXTENDED rcfile_default_format
+POSTHOOK: query: DESCRIBE FORMATTED rcfile_default_format
 POSTHOOK: type: DESCTABLE
+# col_name            	data_type           	comment             
+	 	 
 key                 	string              	None                
 	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Protect Mode:       	None                	 
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
 #### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
 PREHOOK: query: CREATE TABLE rcfile_default_format_ctas AS SELECT key,value FROM src
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
@@ -17,14 +38,40 @@ POSTHOOK: query: CREATE TABLE rcfile_def
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@rcfile_default_format_ctas
-PREHOOK: query: DESCRIBE EXTENDED rcfile_default_format_ctas
+PREHOOK: query: DESCRIBE FORMATTED rcfile_default_format_ctas
 PREHOOK: type: DESCTABLE
-POSTHOOK: query: DESCRIBE EXTENDED rcfile_default_format_ctas
+POSTHOOK: query: DESCRIBE FORMATTED rcfile_default_format_ctas
 POSTHOOK: type: DESCTABLE
+# col_name            	data_type           	comment             
+	 	 
 key                 	string              	None                
 value               	string              	None                
 	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
 #### A masked pattern was here ####
+Protect Mode:       	None                	 
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	numFiles            	1                   
+	numPartitions       	0                   
+	numRows             	500                 
+	rawDataSize         	4812                
+	totalSize           	5293                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
 PREHOOK: query: CREATE TABLE rcfile_default_format_txtfile (key STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
 POSTHOOK: query: CREATE TABLE rcfile_default_format_txtfile (key STRING) STORED AS TEXTFILE
@@ -39,14 +86,40 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@rcfile_default_format_txtfile
 POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-PREHOOK: query: DESCRIBE EXTENDED rcfile_default_format_txtfile
+PREHOOK: query: DESCRIBE FORMATTED rcfile_default_format_txtfile
 PREHOOK: type: DESCTABLE
-POSTHOOK: query: DESCRIBE EXTENDED rcfile_default_format_txtfile
+POSTHOOK: query: DESCRIBE FORMATTED rcfile_default_format_txtfile
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+# col_name            	data_type           	comment             
+	 	 
 key                 	string              	None                
 	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
 #### A masked pattern was here ####
+Protect Mode:       	None                	 
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	numFiles            	1                   
+	numPartitions       	0                   
+	numRows             	500                 
+	rawDataSize         	1406                
+	totalSize           	1906                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
 PREHOOK: query: CREATE TABLE textfile_default_format_ctas AS SELECT key,value FROM rcfile_default_format_ctas
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@rcfile_default_format_ctas
@@ -55,12 +128,194 @@ POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@rcfile_default_format_ctas
 POSTHOOK: Output: default@textfile_default_format_ctas
 POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-PREHOOK: query: DESCRIBE EXTENDED textfile_default_format_ctas
+PREHOOK: query: DESCRIBE FORMATTED textfile_default_format_ctas
 PREHOOK: type: DESCTABLE
-POSTHOOK: query: DESCRIBE EXTENDED textfile_default_format_ctas
+POSTHOOK: query: DESCRIBE FORMATTED textfile_default_format_ctas
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+# col_name            	data_type           	comment             
+	 	 
 key                 	string              	None                
 value               	string              	None                
 	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Protect Mode:       	None                	 
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	numFiles            	1                   
+	numPartitions       	0                   
+	numRows             	500                 
+	rawDataSize         	5312                
+	totalSize           	5812                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: CREATE TABLE rcfile_default_format_ctas_default_serde AS SELECT key,value FROM rcfile_default_format_ctas
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@rcfile_default_format_ctas
+POSTHOOK: query: CREATE TABLE rcfile_default_format_ctas_default_serde AS SELECT key,value FROM rcfile_default_format_ctas
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@rcfile_default_format_ctas
+POSTHOOK: Output: default@rcfile_default_format_ctas_default_serde
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: DESCRIBE FORMATTED rcfile_default_format_ctas_default_serde
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED rcfile_default_format_ctas_default_serde
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	from deserializer   
+value               	string              	from deserializer   
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Protect Mode:       	None                	 
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	numFiles            	1                   
+	numPartitions       	0                   
+	numRows             	500                 
+	rawDataSize         	4812                
+	totalSize           	5293                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: CREATE TABLE rcfile_default_format_default_serde (key STRING)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE rcfile_default_format_default_serde (key STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@rcfile_default_format_default_serde
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: DESCRIBE FORMATTED rcfile_default_format_default_serde
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED rcfile_default_format_default_serde
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	from deserializer   
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Protect Mode:       	None                	 
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: CREATE TABLE rcfile_ctas_default_serde STORED AS rcfile AS SELECT key,value FROM rcfile_default_format_ctas
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@rcfile_default_format_ctas
+POSTHOOK: query: CREATE TABLE rcfile_ctas_default_serde STORED AS rcfile AS SELECT key,value FROM rcfile_default_format_ctas
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@rcfile_default_format_ctas
+POSTHOOK: Output: default@rcfile_ctas_default_serde
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: DESCRIBE FORMATTED rcfile_ctas_default_serde
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED rcfile_ctas_default_serde
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	from deserializer   
+value               	string              	from deserializer   
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Protect Mode:       	None                	 
+Retention:          	0                   	 
 #### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	numFiles            	1                   
+	numPartitions       	0                   
+	numRows             	500                 
+	rawDataSize         	4812                
+	totalSize           	5293                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: CREATE TABLE rcfile_default_serde (key STRING) STORED AS rcfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE rcfile_default_serde (key STRING) STORED AS rcfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@rcfile_default_serde
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: DESCRIBE FORMATTED rcfile_default_serde
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED rcfile_default_serde
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: rcfile_default_format_txtfile.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	from deserializer   
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Protect Mode:       	None                	 
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1