You are viewing a plain text version of this content; the canonical (HTML) link is not reproduced in this rendering.
Posted to commits@hive.apache.org by ha...@apache.org on 2014/05/27 20:48:39 UTC

svn commit: r1597848 - in /hive/trunk: hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientpositive/

Author: hashutosh
Date: Tue May 27 18:48:39 2014
New Revision: 1597848

URL: http://svn.apache.org/r1597848
Log:
HIVE-3756 : alter table set fileformat should set serde too (Chinna Rao Lalam via Ashutosh Chauhan)

Modified:
    hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
    hive/trunk/ql/src/test/queries/clientpositive/exim_04_evolved_parts.q
    hive/trunk/ql/src/test/queries/clientpositive/parquet_serde.q
    hive/trunk/ql/src/test/results/clientpositive/exim_04_evolved_parts.q.out
    hive/trunk/ql/src/test/results/clientpositive/parquet_serde.q.out
    hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat.q.out
    hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat2.q.out
    hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat3.q.out

Modified: hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java?rev=1597848&r1=1597847&r2=1597848&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java (original)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java Tue May 27 18:48:39 2014
@@ -68,8 +68,10 @@ public class TestUseDatabase extends Tes
     assertEquals(0, response.getResponseCode());
     assertNull(response.getErrorMessage());
 
-    response = hcatDriver.run("alter table " + tblName + " set fileformat INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT " +
-        "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver'");
+    response = hcatDriver.run("alter table " + tblName + " set fileformat "
+        + "INPUTFORMAT  'org.apache.hadoop.hive.ql.io.RCFileInputFormat' "
+        + "OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' "
+        + "serde 'org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe' inputdriver 'mydriver' outputdriver 'yourdriver'");
     assertEquals(0, response.getResponseCode());
     assertNull(response.getErrorMessage());
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1597848&r1=1597847&r2=1597848&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Tue May 27 18:48:39 2014
@@ -1328,9 +1328,12 @@ public class DDLSemanticAnalyzer extends
           .getText());
       outputFormat = unescapeSQLString(((ASTNode) child.getChild(1)).getToken()
           .getText());
+      serde = unescapeSQLString(((ASTNode) child.getChild(2)).getToken()
+          .getText());
       try {
         Class.forName(inputFormat);
         Class.forName(outputFormat);
+        Class.forName(serde);
       } catch (ClassNotFoundException e) {
         throw new SemanticException(e);
       }
@@ -1347,10 +1350,12 @@ public class DDLSemanticAnalyzer extends
     case HiveParser.TOK_TBLSEQUENCEFILE:
       inputFormat = SEQUENCEFILE_INPUT;
       outputFormat = SEQUENCEFILE_OUTPUT;
+      serde = org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName();
       break;
     case HiveParser.TOK_TBLTEXTFILE:
       inputFormat = TEXTFILE_INPUT;
       outputFormat = TEXTFILE_OUTPUT;
+      serde = org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName();
       break;
     case HiveParser.TOK_TBLRCFILE:
       inputFormat = RCFILE_INPUT;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1597848&r1=1597847&r2=1597848&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Tue May 27 18:48:39 2014
@@ -1270,8 +1270,8 @@ fileFormat
     | KW_RCFILE  -> ^(TOK_TBLRCFILE)
     | KW_ORCFILE -> ^(TOK_TBLORCFILE)
     | KW_PARQUETFILE -> ^(TOK_TBLPARQUETFILE)
-    | KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral (KW_INPUTDRIVER inDriver=StringLiteral KW_OUTPUTDRIVER outDriver=StringLiteral)?
-      -> ^(TOK_TABLEFILEFORMAT $inFmt $outFmt $inDriver? $outDriver?)
+    | KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral KW_SERDE serdeCls=StringLiteral (KW_INPUTDRIVER inDriver=StringLiteral KW_OUTPUTDRIVER outDriver=StringLiteral)?
+      -> ^(TOK_TABLEFILEFORMAT $inFmt $outFmt $serdeCls $inDriver? $outDriver?)
     | genericSpec=identifier -> ^(TOK_FILEFORMAT_GENERIC $genericSpec)
     ;
 

Modified: hive/trunk/ql/src/test/queries/clientpositive/exim_04_evolved_parts.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/exim_04_evolved_parts.q?rev=1597848&r1=1597847&r2=1597848&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/exim_04_evolved_parts.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/exim_04_evolved_parts.q Tue May 27 18:48:39 2014
@@ -13,10 +13,13 @@ alter table exim_employee add columns (e
 alter table exim_employee clustered by (emp_sex, emp_dept) sorted by (emp_id desc) into 5 buckets;
 alter table exim_employee add partition (emp_country='in', emp_state='tn');
 
-alter table exim_employee set serde "org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe" with serdeproperties ('serialization.format'='2');
 alter table exim_employee set fileformat 
-	inputformat "org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat" 
-	outputformat "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat";
+	inputformat  "org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat" 
+	outputformat "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat"
+        serde        "org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe";
+    
+;
+alter table exim_employee set serde "org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe" with serdeproperties ('serialization.format'='2');
 
 alter table exim_employee add partition (emp_country='in', emp_state='ka');
 dfs ${system:test.dfs.mkdir} target/tmp/ql/test/data/exports/exim_employee/temp;
@@ -36,4 +39,4 @@ dfs -rmr target/tmp/ql/test/data/exports
 select * from exim_employee;
 drop table exim_employee;
 
-drop database importer;
\ No newline at end of file
+drop database importer;

Modified: hive/trunk/ql/src/test/queries/clientpositive/parquet_serde.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/parquet_serde.q?rev=1597848&r1=1597847&r2=1597848&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/parquet_serde.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/parquet_serde.q Tue May 27 18:48:39 2014
@@ -22,7 +22,7 @@ ALTER TABLE parquet_mixed_fileformat set
 ALTER TABLE parquet_mixed_fileformat
      SET FILEFORMAT
      INPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
-     OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat';
+     OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat' SERDE 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe';
 
 DESCRIBE FORMATTED parquet_mixed_fileformat;
 DESCRIBE FORMATTED parquet_mixed_fileformat PARTITION (dateint=20140330);

Modified: hive/trunk/ql/src/test/results/clientpositive/exim_04_evolved_parts.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/exim_04_evolved_parts.q.out?rev=1597848&r1=1597847&r2=1597848&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/exim_04_evolved_parts.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/exim_04_evolved_parts.q.out Tue May 27 18:48:39 2014
@@ -38,26 +38,28 @@ POSTHOOK: query: alter table exim_employ
 POSTHOOK: type: ALTERTABLE_ADDPARTS
 POSTHOOK: Output: default@exim_employee
 POSTHOOK: Output: default@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: query: alter table exim_employee set serde "org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe" with serdeproperties ('serialization.format'='2')
-PREHOOK: type: ALTERTABLE_SERIALIZER
-PREHOOK: Input: default@exim_employee
-PREHOOK: Output: default@exim_employee
-POSTHOOK: query: alter table exim_employee set serde "org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe" with serdeproperties ('serialization.format'='2')
-POSTHOOK: type: ALTERTABLE_SERIALIZER
-POSTHOOK: Input: default@exim_employee
-POSTHOOK: Output: default@exim_employee
 PREHOOK: query: alter table exim_employee set fileformat 
-	inputformat "org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat" 
+	inputformat  "org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat" 
 	outputformat "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat"
+        serde        "org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe"
 PREHOOK: type: ALTERTABLE_FILEFORMAT
 PREHOOK: Input: default@exim_employee
 PREHOOK: Output: default@exim_employee
 POSTHOOK: query: alter table exim_employee set fileformat 
-	inputformat "org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat" 
+	inputformat  "org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat" 
 	outputformat "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat"
+        serde        "org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe"
 POSTHOOK: type: ALTERTABLE_FILEFORMAT
 POSTHOOK: Input: default@exim_employee
 POSTHOOK: Output: default@exim_employee
+PREHOOK: query: alter table exim_employee set serde "org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe" with serdeproperties ('serialization.format'='2')
+PREHOOK: type: ALTERTABLE_SERIALIZER
+PREHOOK: Input: default@exim_employee
+PREHOOK: Output: default@exim_employee
+POSTHOOK: query: alter table exim_employee set serde "org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe" with serdeproperties ('serialization.format'='2')
+POSTHOOK: type: ALTERTABLE_SERIALIZER
+POSTHOOK: Input: default@exim_employee
+POSTHOOK: Output: default@exim_employee
 PREHOOK: query: alter table exim_employee add partition (emp_country='in', emp_state='ka')
 PREHOOK: type: ALTERTABLE_ADDPARTS
 PREHOOK: Output: default@exim_employee

Modified: hive/trunk/ql/src/test/results/clientpositive/parquet_serde.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/parquet_serde.q.out?rev=1597848&r1=1597847&r2=1597848&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/parquet_serde.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/parquet_serde.q.out Tue May 27 18:48:39 2014
@@ -105,14 +105,14 @@ POSTHOOK: Output: default@parquet_mixed_
 PREHOOK: query: ALTER TABLE parquet_mixed_fileformat
      SET FILEFORMAT
      INPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
-     OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+     OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat' SERDE 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
 PREHOOK: type: ALTERTABLE_FILEFORMAT
 PREHOOK: Input: default@parquet_mixed_fileformat
 PREHOOK: Output: default@parquet_mixed_fileformat
 POSTHOOK: query: ALTER TABLE parquet_mixed_fileformat
      SET FILEFORMAT
      INPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
-     OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+     OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat' SERDE 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
 POSTHOOK: type: ALTERTABLE_FILEFORMAT
 POSTHOOK: Input: default@parquet_mixed_fileformat
 POSTHOOK: Output: default@parquet_mixed_fileformat

Modified: hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat.q.out?rev=1597848&r1=1597847&r2=1597848&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat.q.out Tue May 27 18:48:39 2014
@@ -400,8 +400,8 @@ columns:struct columns { string key, str
 partitioned:true
 partitionColumns:struct partition_columns { string dt}
 totalNumberFiles:3
-totalFileSize:1379
-maxFileSize:888
+totalFileSize:1094
+maxFileSize:603
 minFileSize:216
 #### A masked pattern was here ####
 
@@ -469,9 +469,9 @@ columns:struct columns { string key, str
 partitioned:true
 partitionColumns:struct partition_columns { string dt}
 totalNumberFiles:1
-totalFileSize:888
-maxFileSize:888
-minFileSize:888
+totalFileSize:603
+maxFileSize:603
+minFileSize:603
 #### A masked pattern was here ####
 
 PREHOOK: query: select key from partition_test_partitioned where dt=100

Modified: hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat2.q.out?rev=1597848&r1=1597847&r2=1597848&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat2.q.out Tue May 27 18:48:39 2014
@@ -83,14 +83,14 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: partition_test_partitioned
-            Statistics: Num rows: 75 Data size: 523 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 75 Data size: 548 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: key (type: string), value (type: string), dt (type: string), BLOCK__OFFSET__INSIDE__FILE (type: bigint)
               outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 75 Data size: 523 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 75 Data size: 548 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
                 compressed: false
-                Statistics: Num rows: 75 Data size: 523 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 75 Data size: 548 Basic stats: COMPLETE Column stats: NONE
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -217,11 +217,11 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: partition_test_partitioned
-          Statistics: Num rows: 75 Data size: 523 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 75 Data size: 548 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: key (type: string), value (type: string), dt (type: string), BLOCK__OFFSET__INSIDE__FILE (type: bigint)
             outputColumnNames: _col0, _col1, _col2, _col3
-            Statistics: Num rows: 75 Data size: 523 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 75 Data size: 548 Basic stats: COMPLETE Column stats: NONE
             ListSink
 
 PREHOOK: query: select * from partition_test_partitioned where dt >=100 and dt <= 102

Modified: hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat3.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat3.q.out?rev=1597848&r1=1597847&r2=1597848&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat3.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat3.q.out Tue May 27 18:48:39 2014
@@ -80,9 +80,9 @@ columns:struct columns { string key, str
 partitioned:true
 partitionColumns:struct partition_columns { string dt}
 totalNumberFiles:1
-totalFileSize:888
-maxFileSize:888
-minFileSize:888
+totalFileSize:603
+maxFileSize:603
+minFileSize:603
 #### A masked pattern was here ####
 
 PREHOOK: query: select key from partition_test_partitioned where dt=102
@@ -156,9 +156,9 @@ columns:struct columns { string key, str
 partitioned:true
 partitionColumns:struct partition_columns { string dt}
 totalNumberFiles:1
-totalFileSize:888
-maxFileSize:888
-minFileSize:888
+totalFileSize:603
+maxFileSize:603
+minFileSize:603
 #### A masked pattern was here ####
 
 PREHOOK: query: select key from partition_test_partitioned where dt=101