Posted to commits@hive.apache.org by go...@apache.org on 2015/05/29 02:51:03 UTC

[56/82] [abbrv] hive git commit: Revert "HIVE-9605: Remove parquet nested objects from wrapper writable objects (Sergio Pena, reviewed by Ferdinand Xu)"

Revert "HIVE-9605:Remove parquet nested objects from wrapper writable objects (Sergio Pena, reviewed by Ferdinand Xu)"

This reverts commit 1f75e348a90d44bbd6a836ecf7228a092c5ff709.


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/db8067f9
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/db8067f9
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/db8067f9

Branch: refs/heads/llap
Commit: db8067f96dc57ad78f24b7128b39892682881423
Parents: c19efd6
Author: Ferdinand Xu <ch...@intel.com>
Authored: Tue May 26 02:39:05 2015 -0400
Committer: Ferdinand Xu <ch...@intel.com>
Committed: Tue May 26 02:39:05 2015 -0400

----------------------------------------------------------------------
 .../benchmark/storage/ColumnarStorageBench.java |  4 +-
 .../convert/HiveCollectionConverter.java        |  5 ++-
 .../io/parquet/convert/HiveGroupConverter.java  | 10 +++++
 .../hive/ql/io/parquet/convert/Repeated.java    |  8 ++--
 .../serde/AbstractParquetMapInspector.java      | 13 +++---
 .../serde/DeepParquetHiveMapInspector.java      |  6 ++-
 .../serde/ParquetHiveArrayInspector.java        | 43 ++++++++++++++-----
 .../serde/StandardParquetHiveMapInspector.java  |  6 +--
 .../ql/io/parquet/TestArrayCompatibility.java   | 44 ++++++++++----------
 .../ql/io/parquet/TestDataWritableWriter.java   | 44 ++++++++++++--------
 .../hive/ql/io/parquet/TestMapStructures.java   | 36 ++++++++--------
 .../hive/ql/io/parquet/TestParquetSerDe.java    |  8 +++-
 .../serde/TestAbstractParquetMapInspector.java  |  4 +-
 .../serde/TestDeepParquetHiveMapInspector.java  |  4 +-
 .../serde/TestParquetHiveArrayInspector.java    |  3 +-
 .../TestStandardParquetHiveMapInspector.java    |  4 +-
 16 files changed, 150 insertions(+), 92 deletions(-)
----------------------------------------------------------------------
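
For context: this revert restores the synthetic ArrayWritable layer around list and map values that HIVE-9605 had removed, and the ObjectInspector changes below strip that layer again on read. A minimal, standalone sketch of the resulting in-memory layout, using only Hadoop Writable types (the class name WrapperLayoutSketch is illustrative and not part of the patch):

import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;

public class WrapperLayoutSketch {
  public static void main(String[] args) {
    // Elements of a Hive array column, as the Parquet converters produce them.
    Writable[] elements = { new IntWritable(34), new IntWritable(35), new IntWritable(36) };
    ArrayWritable list = new ArrayWritable(Writable.class, elements);

    // With the revert, the converters wrap the list in one extra ArrayWritable
    // (see wrapList in HiveGroupConverter below); the ObjectInspectors then
    // recover the real list by reading element 0 of that container.
    ArrayWritable wrapped = new ArrayWritable(Writable.class, new Writable[] { list });

    ArrayWritable unwrapped = (ArrayWritable) wrapped.get()[0];
    System.out.println(unwrapped.get().length); // prints 3
  }
}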


http://git-wip-us.apache.org/repos/asf/hive/blob/db8067f9/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/storage/ColumnarStorageBench.java
----------------------------------------------------------------------
diff --git a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/storage/ColumnarStorageBench.java b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/storage/ColumnarStorageBench.java
index 4f6985c..61c2eb4 100644
--- a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/storage/ColumnarStorageBench.java
+++ b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/storage/ColumnarStorageBench.java
@@ -209,13 +209,13 @@ public class ColumnarStorageBench {
         case LIST: {
           List<TypeInfo> elementType = new ArrayList<TypeInfo>();
           elementType.add(((ListTypeInfo) type).getListElementTypeInfo());
-          fields[pos++] = createRecord(elementType);
+          fields[pos++] = record(createRecord(elementType));
         } break;
         case MAP: {
           List<TypeInfo> keyValueType = new ArrayList<TypeInfo>();
           keyValueType.add(((MapTypeInfo) type).getMapKeyTypeInfo());
           keyValueType.add(((MapTypeInfo) type).getMapValueTypeInfo());
-          fields[pos++] = record(createRecord(keyValueType));
+          fields[pos++] = record(record(createRecord(keyValueType)));
         } break;
         case STRUCT: {
           List<TypeInfo> elementType = ((StructTypeInfo) type).getAllStructFieldTypeInfos();

http://git-wip-us.apache.org/repos/asf/hive/blob/db8067f9/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveCollectionConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveCollectionConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveCollectionConverter.java
index 80405bd..0fd538e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveCollectionConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveCollectionConverter.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.io.parquet.convert;
 import com.google.common.base.Preconditions;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.hadoop.io.ArrayWritable;
 import org.apache.hadoop.io.Writable;
@@ -82,8 +83,8 @@ public class HiveCollectionConverter extends HiveGroupConverter {
 
   @Override
   public void end() {
-    parent.set(index, new ArrayWritable(
-        Writable.class, list.toArray(new Writable[0])));
+    parent.set(index, wrapList(new ArrayWritable(
+        Writable.class, list.toArray(new Writable[list.size()]))));
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/db8067f9/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java
index c6d03a1..4809f9b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java
@@ -13,6 +13,7 @@
  */
 package org.apache.hadoop.hive.ql.io.parquet.convert;
 
+import org.apache.hadoop.io.ArrayWritable;
 import org.apache.hadoop.io.Writable;
 import parquet.io.api.Converter;
 import parquet.io.api.GroupConverter;
@@ -71,6 +72,15 @@ public abstract class HiveGroupConverter extends GroupConverter implements Conve
     return getConverterFromDescription(type.asGroupType(), index, parent);
   }
 
+  /**
+   * The original list and map conversion didn't remove the synthetic layer and
+   * the ObjectInspector had to remove it. This is a temporary fix that adds an
+   * extra layer for the ObjectInspector to remove.
+   */
+  static ArrayWritable wrapList(ArrayWritable list) {
+    return new ArrayWritable(Writable.class, new Writable[] {list});
+  }
+
   public abstract void set(int index, Writable value);
 
 }
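
A similar, purely illustrative sketch for map data (the class name MapWrapperSketch is hypothetical, and only Hadoop Writable types are used): each entry is a two-element [key, value] ArrayWritable, the repeated entries form one ArrayWritable, and the container produced by wrapList is one more layer that AbstractParquetMapInspector strips via get()[0].

import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class MapWrapperSketch {
  public static void main(String[] args) {
    // Each map entry is a two-element ArrayWritable: [key, value].
    Writable entry1 = new ArrayWritable(Writable.class,
        new Writable[] { new Text("key1"), new IntWritable(1) });
    Writable entry2 = new ArrayWritable(Writable.class,
        new Writable[] { new Text("key2"), new IntWritable(2) });

    // The repeated group of entries...
    ArrayWritable entries = new ArrayWritable(Writable.class, new Writable[] { entry1, entry2 });

    // ...plus the synthetic container that wrapList adds back, which the
    // map inspectors unwrap before iterating the entries.
    ArrayWritable mapValue = new ArrayWritable(Writable.class, new Writable[] { entries });

    Writable[] unwrapped = ((ArrayWritable) mapValue.get()[0]).get();
    System.out.println(unwrapped.length); // prints 2
  }
}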

http://git-wip-us.apache.org/repos/asf/hive/blob/db8067f9/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/Repeated.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/Repeated.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/Repeated.java
index 4becd20..0130aef 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/Repeated.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/Repeated.java
@@ -125,8 +125,8 @@ public interface Repeated extends ConverterParent {
 
     @Override
     public void parentEnd() {
-      parent.set(index, new ArrayWritable(
-          Writable.class, list.toArray(new Writable[list.size()])));
+      parent.set(index, HiveGroupConverter.wrapList(new ArrayWritable(
+          Writable.class, list.toArray(new Writable[list.size()]))));
     }
 
     @Override
@@ -185,8 +185,8 @@ public interface Repeated extends ConverterParent {
 
     @Override
     public void parentEnd() {
-      parent.set(index, new ArrayWritable(
-          Writable.class, list.toArray(new Writable[list.size()])));
+      parent.set(index, wrapList(new ArrayWritable(
+          Writable.class, list.toArray(new Writable[list.size()]))));
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/db8067f9/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/AbstractParquetMapInspector.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/AbstractParquetMapInspector.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/AbstractParquetMapInspector.java
index 49bf1c5..62c61fc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/AbstractParquetMapInspector.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/AbstractParquetMapInspector.java
@@ -59,12 +59,15 @@ public abstract class AbstractParquetMapInspector implements SettableMapObjectIn
     }
 
     if (data instanceof ArrayWritable) {
-      final Writable[] mapArray = ((ArrayWritable) data).get();
-      if (mapArray == null || mapArray.length == 0) {
+      final Writable[] mapContainer = ((ArrayWritable) data).get();
+
+      if (mapContainer == null || mapContainer.length == 0) {
         return null;
       }
 
+      final Writable[] mapArray = ((ArrayWritable) mapContainer[0]).get();
       final Map<Writable, Writable> map = new LinkedHashMap<Writable, Writable>();
+
       for (final Writable obj : mapArray) {
         final ArrayWritable mapObj = (ArrayWritable) obj;
         final Writable[] arr = mapObj.get();
@@ -88,12 +91,12 @@ public abstract class AbstractParquetMapInspector implements SettableMapObjectIn
     }
 
     if (data instanceof ArrayWritable) {
-      final Writable[] mapArray = ((ArrayWritable) data).get();
+      final Writable[] mapContainer = ((ArrayWritable) data).get();
 
-      if (mapArray == null || mapArray.length == 0) {
+      if (mapContainer == null || mapContainer.length == 0) {
         return -1;
       } else {
-        return mapArray.length;
+        return ((ArrayWritable) mapContainer[0]).get().length;
       }
     }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/db8067f9/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/DeepParquetHiveMapInspector.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/DeepParquetHiveMapInspector.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/DeepParquetHiveMapInspector.java
index 143d72e..d38c641 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/DeepParquetHiveMapInspector.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/DeepParquetHiveMapInspector.java
@@ -40,12 +40,14 @@ public class DeepParquetHiveMapInspector extends AbstractParquetMapInspector {
     }
 
     if (data instanceof ArrayWritable) {
-      final Writable[] mapArray = ((ArrayWritable) data).get();
+      final Writable[] mapContainer = ((ArrayWritable) data).get();
 
-      if (mapArray == null || mapArray.length == 0) {
+      if (mapContainer == null || mapContainer.length == 0) {
         return null;
       }
 
+      final Writable[] mapArray = ((ArrayWritable) mapContainer[0]).get();
+
       for (final Writable obj : mapArray) {
         final ArrayWritable mapObj = (ArrayWritable) obj;
         final Writable[] arr = mapObj.get();

http://git-wip-us.apache.org/repos/asf/hive/blob/db8067f9/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveArrayInspector.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveArrayInspector.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveArrayInspector.java
index bde0dcb..53ca31d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveArrayInspector.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveArrayInspector.java
@@ -56,13 +56,20 @@ public class ParquetHiveArrayInspector implements SettableListObjectInspector {
     }
 
     if (data instanceof ArrayWritable) {
-      final Writable[] array = ((ArrayWritable) data).get();
-      if (array == null || array.length == 0) {
+      final Writable[] listContainer = ((ArrayWritable) data).get();
+
+      if (listContainer == null || listContainer.length == 0) {
+        return null;
+      }
+
+      final Writable subObj = listContainer[0];
+
+      if (subObj == null) {
         return null;
       }
 
-      if (index >= 0 && index < array.length) {
-        return array[index];
+      if (index >= 0 && index < ((ArrayWritable) subObj).get().length) {
+        return ((ArrayWritable) subObj).get()[index];
       } else {
         return null;
       }
@@ -78,12 +85,19 @@ public class ParquetHiveArrayInspector implements SettableListObjectInspector {
     }
 
     if (data instanceof ArrayWritable) {
-      final Writable[] array = ((ArrayWritable) data).get();
-      if (array == null || array.length == 0) {
+      final Writable[] listContainer = ((ArrayWritable) data).get();
+
+      if (listContainer == null || listContainer.length == 0) {
         return -1;
       }
 
-      return array.length;
+      final Writable subObj = listContainer[0];
+
+      if (subObj == null) {
+        return 0;
+      }
+
+      return ((ArrayWritable) subObj).get().length;
     }
 
     throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
@@ -96,12 +110,21 @@ public class ParquetHiveArrayInspector implements SettableListObjectInspector {
     }
 
     if (data instanceof ArrayWritable) {
-      final Writable[] array = ((ArrayWritable) data).get();
-      if (array == null || array.length == 0) {
+      final Writable[] listContainer = ((ArrayWritable) data).get();
+
+      if (listContainer == null || listContainer.length == 0) {
         return null;
       }
 
-      final List<Writable> list = new ArrayList<Writable>(array.length);
+      final Writable subObj = listContainer[0];
+
+      if (subObj == null) {
+        return null;
+      }
+
+      final Writable[] array = ((ArrayWritable) subObj).get();
+      final List<Writable> list = new ArrayList<Writable>();
+
       for (final Writable obj : array) {
         list.add(obj);
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/db8067f9/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/StandardParquetHiveMapInspector.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/StandardParquetHiveMapInspector.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/StandardParquetHiveMapInspector.java
index 22250b3..5aa1448 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/StandardParquetHiveMapInspector.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/StandardParquetHiveMapInspector.java
@@ -37,12 +37,12 @@ public class StandardParquetHiveMapInspector extends AbstractParquetMapInspector
       return null;
     }
     if (data instanceof ArrayWritable) {
-      final Writable[] mapArray = ((ArrayWritable) data).get();
+      final Writable[] mapContainer = ((ArrayWritable) data).get();
 
-      if (mapArray == null || mapArray.length == 0) {
+      if (mapContainer == null || mapContainer.length == 0) {
         return null;
       }
-
+      final Writable[] mapArray = ((ArrayWritable) mapContainer[0]).get();
       for (final Writable obj : mapArray) {
         final ArrayWritable mapObj = (ArrayWritable) obj;
         final Writable[] arr = mapObj.get();

http://git-wip-us.apache.org/repos/asf/hive/blob/db8067f9/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestArrayCompatibility.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestArrayCompatibility.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestArrayCompatibility.java
index e0ea262..f349aa0 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestArrayCompatibility.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestArrayCompatibility.java
@@ -62,8 +62,8 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
           }
         });
 
-    ArrayWritable expected = list(
-        new IntWritable(34), new IntWritable(35), new IntWritable(36));
+    ArrayWritable expected = record(list(
+        new IntWritable(34), new IntWritable(35), new IntWritable(36)));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -109,9 +109,9 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
           }
         });
 
-    ArrayWritable expected = list(
+    ArrayWritable expected = record(list(
         record(new FloatWritable(1.0f), new FloatWritable(1.0f)),
-        record(new FloatWritable(2.0f), new FloatWritable(2.0f)));
+        record(new FloatWritable(2.0f), new FloatWritable(2.0f))));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -148,8 +148,8 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
           }
         });
 
-    ArrayWritable expected = list(
-        new IntWritable(34), new IntWritable(35), new IntWritable(36));
+    ArrayWritable expected = record(list(
+        new IntWritable(34), new IntWritable(35), new IntWritable(36)));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -198,9 +198,9 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
           }
         });
 
-    ArrayWritable expected = list(
+    ArrayWritable expected = record(list(
         record(new LongWritable(1234L)),
-        record(new LongWritable(2345L)));
+        record(new LongWritable(2345L))));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -237,8 +237,8 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
           }
         });
 
-    ArrayWritable expected = list(
-        new IntWritable(34), new IntWritable(35), new IntWritable(36));
+    ArrayWritable expected = record(list(
+        new IntWritable(34), new IntWritable(35), new IntWritable(36)));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -287,9 +287,9 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
           }
         });
 
-    ArrayWritable expected = list(
+    ArrayWritable expected = record(list(
         record(new LongWritable(1234L)),
-        record(new LongWritable(2345L)));
+        record(new LongWritable(2345L))));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -339,9 +339,9 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
           }
         });
 
-    ArrayWritable expected = list(
+    ArrayWritable expected = record(list(
         new LongWritable(1234L),
-        new LongWritable(2345L));
+        new LongWritable(2345L)));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -397,9 +397,9 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
           }
         });
 
-    ArrayWritable expected = list(
+    ArrayWritable expected = record(list(
         record(new DoubleWritable(0.0), new DoubleWritable(0.0)),
-        record(new DoubleWritable(0.0), new DoubleWritable(180.0)));
+        record(new DoubleWritable(0.0), new DoubleWritable(180.0))));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -473,10 +473,10 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
           }
         });
 
-    ArrayWritable expected = list(
+    ArrayWritable expected = record(list(
         record(new DoubleWritable(0.0), new DoubleWritable(0.0)),
         null,
-        record(new DoubleWritable(0.0), new DoubleWritable(180.0)));
+        record(new DoubleWritable(0.0), new DoubleWritable(180.0))));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -546,9 +546,9 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
           }
         });
 
-    ArrayWritable expected = list(
+    ArrayWritable expected = record(list(
         record(new DoubleWritable(0.0), new DoubleWritable(180.0)),
-        record(new DoubleWritable(0.0), new DoubleWritable(0.0)));
+        record(new DoubleWritable(0.0), new DoubleWritable(0.0))));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -619,9 +619,9 @@ public class TestArrayCompatibility extends AbstractTestParquetDirect {
           }
         });
 
-    ArrayWritable expected = list(
+    ArrayWritable expected = record(list(
         record(new DoubleWritable(0.0), new DoubleWritable(180.0)),
-        record(new DoubleWritable(0.0), new DoubleWritable(0.0)));
+        record(new DoubleWritable(0.0), new DoubleWritable(0.0))));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());

http://git-wip-us.apache.org/repos/asf/hive/blob/db8067f9/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java
index 73425e3..8f03c5b 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java
@@ -311,11 +311,13 @@ public class TestDataWritableWriter {
         + "}\n";
 
     ArrayWritable hiveRecord = createGroup(
-      createArray(
-          createInt(1),
-          createNull(),
-          createInt(2)
-      )
+        createGroup(
+            createArray(
+                createInt(1),
+                createNull(),
+                createInt(2)
+            )
+        )
     );
 
     // Write record to Parquet format
@@ -361,16 +363,18 @@ public class TestDataWritableWriter {
     ArrayWritable hiveRecord = createGroup(
         createGroup(
             createArray(
-                createString("key1"),
-                createInt(1)
-            ),
-            createArray(
-                createString("key2"),
-                createInt(2)
-            ),
-            createArray(
-                createString("key3"),
-                createNull()
+                createArray(
+                    createString("key1"),
+                    createInt(1)
+                ),
+                createArray(
+                    createString("key2"),
+                    createInt(2)
+                ),
+                createArray(
+                    createString("key3"),
+                    createNull()
+                )
             )
         )
     );
@@ -428,10 +432,14 @@ public class TestDataWritableWriter {
         + "}\n";
 
     ArrayWritable hiveRecord = createGroup(
-        createArray(
+        createGroup(
             createArray(
-                createInt(1),
-                createInt(2)
+                createGroup(
+                    createArray(
+                        createInt(1),
+                        createInt(2)
+                    )
+                )
             )
         )
     );

http://git-wip-us.apache.org/repos/asf/hive/blob/db8067f9/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapStructures.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapStructures.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapStructures.java
index 32264a4..6fc83e0 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapStructures.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapStructures.java
@@ -82,9 +82,9 @@ public class TestMapStructures extends AbstractTestParquetDirect {
           }
         });
 
-    ArrayWritable expected = list(
+    ArrayWritable expected = record(list(
         record(new Text("lettuce"), new IntWritable(34)),
-        record(new Text("cabbage"), new IntWritable(18)));
+        record(new Text("cabbage"), new IntWritable(18))));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -149,10 +149,10 @@ public class TestMapStructures extends AbstractTestParquetDirect {
           }
         });
 
-    ArrayWritable expected = list(
+    ArrayWritable expected = record(list(
         record(new Text("lettuce"), new IntWritable(34)),
         record(new Text("kale"), null),
-        record(new Text("cabbage"), new IntWritable(18)));
+        record(new Text("cabbage"), new IntWritable(18))));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -230,9 +230,9 @@ public class TestMapStructures extends AbstractTestParquetDirect {
           }
         });
 
-    ArrayWritable expected = list(
-        record(new Text("green"), record(new Text("lettuce"), new Text("kale"), null)),
-        record(new Text("brown"), null));
+    ArrayWritable expected = record(list(
+        record(new Text("green"), list(new Text("lettuce"), new Text("kale"), null)),
+        record(new Text("brown"), null)));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -325,9 +325,9 @@ public class TestMapStructures extends AbstractTestParquetDirect {
           }
         });
 
-    ArrayWritable expected = list(
-        record(new Text("low"), record(new IntWritable(34), new IntWritable(35), null)),
-        record(new Text("high"), record(new IntWritable(340), new IntWritable(360))));
+    ArrayWritable expected = record(list(
+        record(new Text("low"), list(new IntWritable(34), new IntWritable(35), null)),
+        record(new Text("high"), list(new IntWritable(340), new IntWritable(360)))));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -386,9 +386,9 @@ public class TestMapStructures extends AbstractTestParquetDirect {
           }
         });
 
-    ArrayWritable expected = list(record(
+    ArrayWritable expected = record(list(record(
         record(new IntWritable(7), new IntWritable(22)),
-        new DoubleWritable(3.14)));
+        new DoubleWritable(3.14))));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -447,9 +447,9 @@ public class TestMapStructures extends AbstractTestParquetDirect {
           }
         });
 
-    ArrayWritable expected = list(record(
+    ArrayWritable expected = record(list(record(
         new DoubleWritable(3.14),
-        record(new IntWritable(7), new IntWritable(22))));
+        record(new IntWritable(7), new IntWritable(22)))));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -542,13 +542,13 @@ public class TestMapStructures extends AbstractTestParquetDirect {
           }
         });
 
-    ArrayWritable expected = list(
-        record(new Text("a"), record(
+    ArrayWritable expected = record(list(
+        record(new Text("a"), list(
             record(new Text("b"), new IntWritable(1)))),
-        record(new Text("b"), record(
+        record(new Text("b"), list(
             record(new Text("a"), new IntWritable(-1)),
             record(new Text("b"), new IntWritable(-2))))
-    );
+    ));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());

http://git-wip-us.apache.org/repos/asf/hive/blob/db8067f9/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java
index dbb2795..21f889a 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java
@@ -55,6 +55,7 @@ public class TestParquetSerDe extends TestCase {
       arr[4] = new DoubleWritable((double) 5.3);
       arr[5] = new BytesWritable("hive and hadoop and parquet. Big family.".getBytes("UTF-8"));
       arr[6] = new BytesWritable("parquetSerde binary".getBytes("UTF-8"));
+      final Writable[] mapContainer = new Writable[1];
       final Writable[] map = new Writable[3];
       for (int i = 0; i < 3; ++i) {
         final Writable[] pair = new Writable[2];
@@ -62,13 +63,16 @@ public class TestParquetSerDe extends TestCase {
         pair[1] = new IntWritable(i);
         map[i] = new ArrayWritable(Writable.class, pair);
       }
-      arr[7] = new ArrayWritable(Writable.class, map);
+      mapContainer[0] = new ArrayWritable(Writable.class, map);
+      arr[7] = new ArrayWritable(Writable.class, mapContainer);
 
+      final Writable[] arrayContainer = new Writable[1];
       final Writable[] array = new Writable[5];
       for (int i = 0; i < 5; ++i) {
         array[i] = new BytesWritable(("elem_" + i).getBytes("UTF-8"));
       }
-      arr[8] = new ArrayWritable(Writable.class, array);
+      arrayContainer[0] = new ArrayWritable(Writable.class, array);
+      arr[8] = new ArrayWritable(Writable.class, arrayContainer);
 
       final ArrayWritable arrWritable = new ArrayWritable(Writable.class, arr);
       // Test

http://git-wip-us.apache.org/repos/asf/hive/blob/db8067f9/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestAbstractParquetMapInspector.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestAbstractParquetMapInspector.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestAbstractParquetMapInspector.java
index f5d9cb4..ef05150 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestAbstractParquetMapInspector.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestAbstractParquetMapInspector.java
@@ -71,9 +71,11 @@ public class TestAbstractParquetMapInspector extends TestCase {
     final Writable[] entry1 = new Writable[]{new IntWritable(0), new IntWritable(1)};
     final Writable[] entry2 = new Writable[]{new IntWritable(2), new IntWritable(3)};
 
-    final ArrayWritable map = new ArrayWritable(ArrayWritable.class, new Writable[]{
+    final ArrayWritable internalMap = new ArrayWritable(ArrayWritable.class, new Writable[]{
       new ArrayWritable(Writable.class, entry1), new ArrayWritable(Writable.class, entry2)});
 
+    final ArrayWritable map = new ArrayWritable(ArrayWritable.class, new Writable[]{internalMap});
+
     final Map<Writable, Writable> expected = new HashMap<Writable, Writable>();
     expected.put(new IntWritable(0), new IntWritable(1));
     expected.put(new IntWritable(2), new IntWritable(3));

http://git-wip-us.apache.org/repos/asf/hive/blob/db8067f9/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestDeepParquetHiveMapInspector.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestDeepParquetHiveMapInspector.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestDeepParquetHiveMapInspector.java
index 1ca6861..8646ff4 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestDeepParquetHiveMapInspector.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestDeepParquetHiveMapInspector.java
@@ -58,9 +58,11 @@ public class TestDeepParquetHiveMapInspector extends TestCase {
     final Writable[] entry1 = new Writable[]{new IntWritable(0), new IntWritable(1)};
     final Writable[] entry2 = new Writable[]{new IntWritable(2), new IntWritable(3)};
 
-    final ArrayWritable map = new ArrayWritable(ArrayWritable.class, new Writable[]{
+    final ArrayWritable internalMap = new ArrayWritable(ArrayWritable.class, new Writable[]{
       new ArrayWritable(Writable.class, entry1), new ArrayWritable(Writable.class, entry2)});
 
+    final ArrayWritable map = new ArrayWritable(ArrayWritable.class, new Writable[]{internalMap});
+
     assertEquals("Wrong result of inspection", new IntWritable(1), inspector.getMapValueElement(map, new IntWritable(0)));
     assertEquals("Wrong result of inspection", new IntWritable(3), inspector.getMapValueElement(map, new IntWritable(2)));
     assertEquals("Wrong result of inspection", new IntWritable(1), inspector.getMapValueElement(map, new ShortWritable((short) 0)));

http://git-wip-us.apache.org/repos/asf/hive/blob/db8067f9/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetHiveArrayInspector.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetHiveArrayInspector.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetHiveArrayInspector.java
index 0ce654d..f3a24af 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetHiveArrayInspector.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetHiveArrayInspector.java
@@ -58,8 +58,9 @@ public class TestParquetHiveArrayInspector extends TestCase {
 
   @Test
   public void testRegularList() {
-    final ArrayWritable list = new ArrayWritable(Writable.class,
+    final ArrayWritable internalList = new ArrayWritable(Writable.class,
             new Writable[]{new IntWritable(3), new IntWritable(5), new IntWritable(1)});
+    final ArrayWritable list = new ArrayWritable(ArrayWritable.class, new ArrayWritable[]{internalList});
 
     final List<Writable> expected = new ArrayList<Writable>();
     expected.add(new IntWritable(3));

http://git-wip-us.apache.org/repos/asf/hive/blob/db8067f9/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestStandardParquetHiveMapInspector.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestStandardParquetHiveMapInspector.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestStandardParquetHiveMapInspector.java
index 950956a..278419f 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestStandardParquetHiveMapInspector.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestStandardParquetHiveMapInspector.java
@@ -57,9 +57,11 @@ public class TestStandardParquetHiveMapInspector extends TestCase {
     final Writable[] entry1 = new Writable[]{new IntWritable(0), new IntWritable(1)};
     final Writable[] entry2 = new Writable[]{new IntWritable(2), new IntWritable(3)};
 
-    final ArrayWritable map = new ArrayWritable(ArrayWritable.class, new Writable[]{
+    final ArrayWritable internalMap = new ArrayWritable(ArrayWritable.class, new Writable[]{
       new ArrayWritable(Writable.class, entry1), new ArrayWritable(Writable.class, entry2)});
 
+    final ArrayWritable map = new ArrayWritable(ArrayWritable.class, new Writable[]{internalMap});
+
     assertEquals("Wrong result of inspection", new IntWritable(1), inspector.getMapValueElement(map, new IntWritable(0)));
     assertEquals("Wrong result of inspection", new IntWritable(3), inspector.getMapValueElement(map, new IntWritable(2)));
     assertNull("Wrong result of inspection", inspector.getMapValueElement(map, new ShortWritable((short) 0)));