Posted to commits@hive.apache.org by br...@apache.org on 2015/01/30 21:48:53 UTC

svn commit: r1656114 - in /hive/trunk: data/files/ ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientpositive/

Author: brock
Date: Fri Jan 30 20:48:52 2015
New Revision: 1656114

URL: http://svn.apache.org/r1656114
Log:
HIVE-9502 - Parquet cannot read Map types from files written with Hive <= 0.12 (Sergio Pena via Brock)

Added:
    hive/trunk/data/files/alltypesparquet   (with props)
    hive/trunk/ql/src/test/queries/clientpositive/parquet_read_backward_compatible_files.q
    hive/trunk/ql/src/test/results/clientpositive/parquet_read_backward_compatible_files.q.out
Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java

Added: hive/trunk/data/files/alltypesparquet
URL: http://svn.apache.org/viewvc/hive/trunk/data/files/alltypesparquet?rev=1656114&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hive/trunk/data/files/alltypesparquet
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java?rev=1656114&r1=1656113&r2=1656114&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java Fri Jan 30 20:48:52 2015
@@ -53,7 +53,7 @@ public abstract class HiveGroupConverter
     OriginalType annotation = type.getOriginalType();
     if (annotation == OriginalType.LIST) {
       return HiveCollectionConverter.forList(type, parent, index);
-    } else if (annotation == OriginalType.MAP) {
+    } else if (annotation == OriginalType.MAP || annotation == OriginalType.MAP_KEY_VALUE) {
       return HiveCollectionConverter.forMap(type, parent, index);
     }
 

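For context, the one-line change above makes the converter treat the legacy MAP_KEY_VALUE annotation the same as MAP when choosing how to convert a group. The snippet below is a minimal, self-contained sketch of that check, not Hive's actual HiveGroupConverter; the enum is a stand-in for the relevant parquet-mr OriginalType values, defined locally only so the example compiles on its own.

    // Sketch of the annotation check changed by this commit (assumption:
    // names below mirror, but are not, the real parquet-mr/Hive classes).
    public class MapAnnotationCheck {

      // Stand-in for the parquet-mr OriginalType values involved here.
      enum OriginalType { LIST, MAP, MAP_KEY_VALUE }

      // Older writers (including Hive <= 0.12) used the legacy MAP_KEY_VALUE
      // annotation where newer writers use MAP. Matching only MAP made the
      // converter fall through for old files, so their maps could not be read.
      static boolean isMapAnnotation(OriginalType annotation) {
        return annotation == OriginalType.MAP
            || annotation == OriginalType.MAP_KEY_VALUE;
      }

      public static void main(String[] args) {
        System.out.println(isMapAnnotation(OriginalType.MAP));           // true
        System.out.println(isMapAnnotation(OriginalType.MAP_KEY_VALUE)); // true after this fix
        System.out.println(isMapAnnotation(OriginalType.LIST));          // false
      }
    }
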
Added: hive/trunk/ql/src/test/queries/clientpositive/parquet_read_backward_compatible_files.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/parquet_read_backward_compatible_files.q?rev=1656114&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/parquet_read_backward_compatible_files.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/parquet_read_backward_compatible_files.q Fri Jan 30 20:48:52 2015
@@ -0,0 +1,20 @@
+-- This test makes sure that Parquet can read older Parquet files written by Hive <= 0.12
+-- alltypesparquet is a file written by an older version of Hive
+
+CREATE TABLE alltypesparquet (
+    bo1 boolean,
+    ti1 tinyint,
+    si1 smallint,
+    i1 int,
+    bi1 bigint,
+    f1 float,
+    d1 double,
+    s1 string,
+    m1 map<string,string>,
+    l1 array<int>,
+    st1 struct<c1:int,c2:string>
+) STORED AS PARQUET;
+
+LOAD DATA LOCAL INPATH '../../data/files/alltypesparquet' OVERWRITE INTO TABLE alltypesparquet;
+
+SELECT * FROM alltypesparquet;
\ No newline at end of file

Added: hive/trunk/ql/src/test/results/clientpositive/parquet_read_backward_compatible_files.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/parquet_read_backward_compatible_files.q.out?rev=1656114&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/parquet_read_backward_compatible_files.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/parquet_read_backward_compatible_files.q.out Fri Jan 30 20:48:52 2015
@@ -0,0 +1,55 @@
+PREHOOK: query: -- This test makes sure that Parquet can read older Parquet files written by Hive <= 0.12
+-- alltypesparquet is a file written by an older version of Hive
+
+CREATE TABLE alltypesparquet (
+    bo1 boolean,
+    ti1 tinyint,
+    si1 smallint,
+    i1 int,
+    bi1 bigint,
+    f1 float,
+    d1 double,
+    s1 string,
+    m1 map<string,string>,
+    l1 array<int>,
+    st1 struct<c1:int,c2:string>
+) STORED AS PARQUET
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@alltypesparquet
+POSTHOOK: query: -- This test makes sure that Parquet can read older Parquet files written by Hive <= 0.12
+-- alltypesparquet is a file written by an older version of Hive
+
+CREATE TABLE alltypesparquet (
+    bo1 boolean,
+    ti1 tinyint,
+    si1 smallint,
+    i1 int,
+    bi1 bigint,
+    f1 float,
+    d1 double,
+    s1 string,
+    m1 map<string,string>,
+    l1 array<int>,
+    st1 struct<c1:int,c2:string>
+) STORED AS PARQUET
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@alltypesparquet
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/alltypesparquet' OVERWRITE INTO TABLE alltypesparquet
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@alltypesparquet
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/alltypesparquet' OVERWRITE INTO TABLE alltypesparquet
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@alltypesparquet
+PREHOOK: query: SELECT * FROM alltypesparquet
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesparquet
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM alltypesparquet
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesparquet
+#### A masked pattern was here ####
+true	10	100	1000	10000	4.0	20.0	hello	{"k1":"v1"}	[100,200]	{"c1":10,"c2":"foo"}