Posted to commits@drill.apache.org by am...@apache.org on 2019/06/24 00:58:47 UTC

[drill] branch master updated (f3f7dbd -> 205e028)

This is an automated email from the ASF dual-hosted git repository.

amansinha pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git.


    from f3f7dbd  DRILL-7292: Remove V1 and V2 text readers
     new ddc2c02  DRILL-7268: Read Hive array with parquet native reader
     new d108677  DRILL-7297: Query hangs in planning stage when Error is thrown
     new 205e028  DRILL-7302: Bump Apache Avro to 1.9.0

The 3 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../drill/exec/store/hive/HiveUtilities.java       |    3 +-
 .../exec/hive/complex_types/TestHiveArrays.java    | 1439 ++++++++------------
 .../resourcemgr/config/selectors/AclSelector.java  |    2 +-
 .../config/selectors/NotEqualSelector.java         |    2 +-
 .../store/parquet2/DrillParquetGroupConverter.java |  349 +++--
 .../exec/store/parquet2/DrillParquetReader.java    |   30 +-
 .../parquet2/DrillParquetRecordMaterializer.java   |    2 +-
 .../apache/drill/exec/work/foreman/Foreman.java    |    8 +-
 .../java/org/apache/drill/TestFunctionsQuery.java  |    8 +
 .../exec/fn/impl/testing/CustomErrorFunction.java} |   29 +-
 .../impl/join/TestLateralJoinCorrectness.java      |    2 +-
 .../TestLateralJoinCorrectnessBatchProcessing.java |    2 +-
 .../exec/physical/impl/scan/ScanTestUtils.java     |    2 +-
 .../store/parquet2/TestDrillParquetReader.java     |  855 ++++++++++++
 .../test/resources/parquet2/hive_arrays_p.parquet  |  Bin 0 -> 14270 bytes
 pom.xml                                            |    2 +-
 16 files changed, 1697 insertions(+), 1038 deletions(-)
 copy exec/java-exec/src/{main/java/org/apache/drill/exec/expr/fn/impl/Not.java => test/java/org/apache/drill/exec/fn/impl/testing/CustomErrorFunction.java} (65%)
 create mode 100755 exec/java-exec/src/test/resources/parquet2/hive_arrays_p.parquet


[drill] 01/03: DRILL-7268: Read Hive array with parquet native reader

Posted by am...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

amansinha pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit ddc2c02eb863dbea3b46f69bc3a33803397c35bf
Author: Igor Guzenko <ih...@gmail.com>
AuthorDate: Thu Jun 6 10:47:57 2019 +0300

    DRILL-7268: Read Hive array with parquet native reader
    
    1. Fixed preservation of the group originalType in the projected schema
       built by DrillParquetReader.
    2. Added reading of the LIST logical type to DrillParquetGroupConverter.
       An intermediate no-op converter is used to skip writing for the next
       nested repeated field once the parent field is recognized as a LIST;
       to achieve this, skipRepeated 'true' is passed to the child
       converter's constructor.
    
    close apache/drill#1805
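
    [Editor's note, for illustration only -- not code from this commit.]
    A minimal sketch of the no-op converter idea described above, assuming
    parquet-mr's converter API (org.apache.parquet.io.api). The class name
    NoopListConverter is hypothetical; Drill's actual logic lives in
    DrillParquetGroupConverter, shown in the diff below.

        import org.apache.parquet.io.api.Converter;
        import org.apache.parquet.io.api.GroupConverter;

        // A Parquet LIST is encoded as: group (LIST) -> repeated group ->
        // element. Once the parent group is recognized as a LIST, this
        // pass-through converter skips the intermediate repeated level and
        // delegates straight to the element converter, so only the element
        // values themselves are written.
        public class NoopListConverter extends GroupConverter {

          private final Converter elementConverter;

          public NoopListConverter(Converter elementConverter) {
            this.elementConverter = elementConverter;
          }

          @Override
          public Converter getConverter(int fieldIndex) {
            // The repeated group of a LIST has a single child: the element.
            return elementConverter;
          }

          @Override
          public void start() {
            // No-op: the wrapping repeated level carries no data of its own.
          }

          @Override
          public void end() {
            // No-op: nothing to flush for the skipped level.
          }
        }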
---
 .../drill/exec/store/hive/HiveUtilities.java       |    3 +-
 .../exec/hive/complex_types/TestHiveArrays.java    | 1439 ++++++++------------
 .../store/parquet2/DrillParquetGroupConverter.java |  349 +++--
 .../exec/store/parquet2/DrillParquetReader.java    |   30 +-
 .../parquet2/DrillParquetRecordMaterializer.java   |    2 +-
 .../store/parquet2/TestDrillParquetReader.java     |  855 ++++++++++++
 .../test/resources/parquet2/hive_arrays_p.parquet  |  Bin 0 -> 14270 bytes
 7 files changed, 1664 insertions(+), 1014 deletions(-)

diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
index f2b5a28..1b1c3e3 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
@@ -706,8 +706,7 @@ public class HiveUtilities {
       final Category category = TypeInfoUtils.getTypeInfoFromTypeString(hiveField.getType()).getCategory();
       if (category == Category.MAP ||
           category == Category.STRUCT ||
-          category == Category.UNION ||
-          category == Category.LIST) {
+          category == Category.UNION) {
         logger.debug("Hive table contains unsupported data type: {}", category);
         return true;
       }
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveArrays.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveArrays.java
index d3aa2ef..39f6a1c 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveArrays.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveArrays.java
@@ -21,8 +21,13 @@ import java.math.BigDecimal;
 import java.nio.file.Paths;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 import org.apache.drill.categories.HiveStorageTest;
+import org.apache.drill.categories.SlowTest;
+import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.hive.HiveTestFixture;
 import org.apache.drill.exec.hive.HiveTestUtilities;
 import org.apache.drill.exec.util.StoragePluginTestUtils;
@@ -39,15 +44,21 @@ import static java.util.Arrays.asList;
 import static java.util.Collections.emptyList;
 import static org.apache.drill.exec.expr.fn.impl.DateUtility.parseBest;
 import static org.apache.drill.exec.expr.fn.impl.DateUtility.parseLocalDate;
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.junit.Assert.assertThat;
 
-@Category({HiveStorageTest.class})
+@Category({SlowTest.class, HiveStorageTest.class})
 public class TestHiveArrays extends ClusterTest {
 
   private static HiveTestFixture hiveTestFixture;
 
+  private static final String[] TYPES = {"int", "string", "varchar(5)", "char(2)", "tinyint",
+      "smallint", "decimal(9,3)", "boolean", "bigint", "float", "double", "date", "timestamp"};
+
   @BeforeClass
   public static void setUp() throws Exception {
-    startCluster(ClusterFixture.builder(dirTestWatcher));
+    startCluster(ClusterFixture.builder(dirTestWatcher)
+        .sessionOption(ExecConstants.HIVE_OPTIMIZE_PARQUET_SCAN_WITH_NATIVE_READER, true));
     hiveTestFixture = HiveTestFixture.builder(dirTestWatcher).build();
     hiveTestFixture.getDriverManager().runWithinSession(TestHiveArrays::generateData);
     hiveTestFixture.getPluginManager().addHivePluginTo(cluster.drillbit());
@@ -61,87 +72,13 @@ public class TestHiveArrays extends ClusterTest {
   }
 
   private static void generateData(Driver d) {
-    // int_array
-    HiveTestUtilities.executeQuery(d,
-        "CREATE TABLE int_array(rid INT, arr_n_0 ARRAY<INT>, arr_n_1 ARRAY<ARRAY<INT>>,arr_n_2 ARRAY<ARRAY<ARRAY<INT>>>) " +
-            "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe' STORED AS TEXTFILE");
-    HiveTestUtilities.loadData(d, "int_array", Paths.get("complex_types/array/int_array.json"));
-
-    // string_array
-    HiveTestUtilities.executeQuery(d,
-        "CREATE TABLE string_array(rid INT, arr_n_0 ARRAY<STRING>, arr_n_1 ARRAY<ARRAY<STRING>>,arr_n_2 ARRAY<ARRAY<ARRAY<STRING>>>) " +
-            "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe' STORED AS TEXTFILE");
-    HiveTestUtilities.loadData(d, "string_array", Paths.get("complex_types/array/string_array.json"));
-
-    // varchar_array
-    HiveTestUtilities.executeQuery(d,
-        "CREATE TABLE varchar_array(rid INT, arr_n_0 ARRAY<VARCHAR(5)>,arr_n_1 ARRAY<ARRAY<VARCHAR(5)>>,arr_n_2 ARRAY<ARRAY<ARRAY<VARCHAR(5)>>>) " +
-            "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe' STORED AS TEXTFILE");
-    HiveTestUtilities.loadData(d, "varchar_array", Paths.get("complex_types/array/varchar_array.json"));
-
-    // char_array
-    HiveTestUtilities.executeQuery(d,
-        "CREATE TABLE char_array(rid INT, arr_n_0 ARRAY<CHAR(2)>,arr_n_1 ARRAY<ARRAY<CHAR(2)>>, arr_n_2 ARRAY<ARRAY<ARRAY<CHAR(2)>>>) " +
-            "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe' STORED AS TEXTFILE");
-    HiveTestUtilities.loadData(d, "char_array", Paths.get("complex_types/array/char_array.json"));
-
-    // tinyint_array
-    HiveTestUtilities.executeQuery(d,
-        "CREATE TABLE tinyint_array(rid INT, arr_n_0 ARRAY<TINYINT>, arr_n_1 ARRAY<ARRAY<TINYINT>>, arr_n_2 ARRAY<ARRAY<ARRAY<TINYINT>>> ) " +
-            "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe' STORED AS TEXTFILE");
-    HiveTestUtilities.loadData(d, "tinyint_array", Paths.get("complex_types/array/tinyint_array.json"));
-
-    // smallint_array
-    HiveTestUtilities.executeQuery(d,
-        "CREATE TABLE smallint_array(rid INT, arr_n_0 ARRAY<SMALLINT>, arr_n_1 ARRAY<ARRAY<SMALLINT>>, arr_n_2 ARRAY<ARRAY<ARRAY<SMALLINT>>>) " +
-            "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe' STORED AS TEXTFILE");
-    HiveTestUtilities.loadData(d, "smallint_array", Paths.get("complex_types/array/smallint_array.json"));
-
-    // decimal_array
-    HiveTestUtilities.executeQuery(d,
-        "CREATE TABLE decimal_array(rid INT, arr_n_0 ARRAY<DECIMAL(9,3)>, arr_n_1 ARRAY<ARRAY<DECIMAL(9,3)>>,arr_n_2 ARRAY<ARRAY<ARRAY<DECIMAL(9,3)>>>) " +
-            "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe' STORED AS TEXTFILE");
-    HiveTestUtilities.loadData(d, "decimal_array", Paths.get("complex_types/array/decimal_array.json"));
-
-    // boolean_array
-    HiveTestUtilities.executeQuery(d,
-        "CREATE TABLE boolean_array(rid INT, arr_n_0 ARRAY<BOOLEAN>, arr_n_1 ARRAY<ARRAY<BOOLEAN>>,arr_n_2 ARRAY<ARRAY<ARRAY<BOOLEAN>>>) " +
-            "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe' STORED AS TEXTFILE");
-    HiveTestUtilities.loadData(d, "boolean_array", Paths.get("complex_types/array/boolean_array.json"));
-
-    // bigint_array
-    HiveTestUtilities.executeQuery(d,
-        "CREATE TABLE bigint_array(rid INT, arr_n_0 ARRAY<BIGINT>, arr_n_1 ARRAY<ARRAY<BIGINT>>,arr_n_2 ARRAY<ARRAY<ARRAY<BIGINT>>>) " +
-            "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe' STORED AS TEXTFILE");
-    HiveTestUtilities.loadData(d, "bigint_array", Paths.get("complex_types/array/bigint_array.json"));
-
-    // float_array
-    HiveTestUtilities.executeQuery(d,
-        "CREATE TABLE float_array(rid INT, arr_n_0 ARRAY<FLOAT>, arr_n_1 ARRAY<ARRAY<FLOAT>>,arr_n_2 ARRAY<ARRAY<ARRAY<FLOAT>>>) " +
-            "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe' STORED AS TEXTFILE");
-    HiveTestUtilities.loadData(d, "float_array", Paths.get("complex_types/array/float_array.json"));
-
-    // double_array
-    HiveTestUtilities.executeQuery(d,
-        "CREATE TABLE double_array(rid INT, arr_n_0 ARRAY<DOUBLE>, arr_n_1 ARRAY<ARRAY<DOUBLE>>, arr_n_2 ARRAY<ARRAY<ARRAY<DOUBLE>>>) " +
-            "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe' STORED AS TEXTFILE");
-    HiveTestUtilities.loadData(d, "double_array", Paths.get("complex_types/array/double_array.json"));
-
-    // date_array
-    HiveTestUtilities.executeQuery(d,
-        "CREATE TABLE date_array(rid INT, arr_n_0 ARRAY<DATE>, arr_n_1 ARRAY<ARRAY<DATE>>,arr_n_2 ARRAY<ARRAY<ARRAY<DATE>>>) " +
-            "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe' STORED AS TEXTFILE");
-    HiveTestUtilities.loadData(d, "date_array", Paths.get("complex_types/array/date_array.json"));
-
-    // timestamp_array
-    HiveTestUtilities.executeQuery(d,
-        "CREATE TABLE timestamp_array(rid INT, arr_n_0 ARRAY<TIMESTAMP>, arr_n_1 ARRAY<ARRAY<TIMESTAMP>>,arr_n_2 ARRAY<ARRAY<ARRAY<TIMESTAMP>>>) " +
-            "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe' STORED AS TEXTFILE");
-    HiveTestUtilities.loadData(d, "timestamp_array", Paths.get("complex_types/array/timestamp_array.json"));
+    Stream.of(TYPES).forEach(type -> {
+      createJsonTable(d, type);
+      createParquetTable(d, type);
+    });
 
     // binary_array
-    HiveTestUtilities.executeQuery(d,
-        "CREATE TABLE binary_array(arr_n_0 ARRAY<BINARY>) STORED AS TEXTFILE");
+    HiveTestUtilities.executeQuery(d, "CREATE TABLE binary_array(arr_n_0 ARRAY<BINARY>) STORED AS TEXTFILE");
     HiveTestUtilities.executeQuery(d, "insert into binary_array select array(binary('First'),binary('Second'),binary('Third'))");
     HiveTestUtilities.executeQuery(d, "insert into binary_array select array(binary('First'))");
 
@@ -205,73 +142,88 @@ public class TestHiveArrays extends ClusterTest {
     );
   }
 
+  private static void createJsonTable(Driver d, String type) {
+    String tableName = getTableNameFromType(type);
+    String ddl = String.format(
+        "CREATE TABLE %s(rid INT, arr_n_0 ARRAY<%2$s>, arr_n_1 ARRAY<ARRAY<%2$s>>, arr_n_2 ARRAY<ARRAY<ARRAY<%2$s>>>) " +
+            "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe' STORED AS TEXTFILE",
+        tableName, type.toUpperCase());
+
+    HiveTestUtilities.executeQuery(d, ddl);
+    HiveTestUtilities.loadData(d, tableName, Paths.get(String.format("complex_types/array/%s.json", tableName)));
+  }
+
+  private static void createParquetTable(Driver d, String type) {
+      String from = getTableNameFromType(type);
+      String to = from.concat("_p");
+      String ddl = String.format(
+          "CREATE TABLE %s(rid INT, arr_n_0 ARRAY<%2$s>, arr_n_1 ARRAY<ARRAY<%2$s>>, arr_n_2 ARRAY<ARRAY<ARRAY<%2$s>>>) STORED AS PARQUET",
+          to, type.toUpperCase());
+      HiveTestUtilities.executeQuery(d, ddl);
+      HiveTestUtilities.insertData(d, from, to);
+  }
+
+  private static String getTableNameFromType(String type) {
+    String tblType = type.split("\\(")[0];
+    return tblType.toLowerCase() + "_array";
+  }
+
   @Test
   public void intArray() throws Exception {
+    checkIntArrayInTable("int_array");
+  }
+
+  @Test
+  public void intArrayParquet() throws Exception {
+    checkNativeScanUsed("int_array_p");
+    checkIntArrayInTable("int_array_p");
+  }
+
+  private void checkNativeScanUsed(String table) throws Exception {
+    String plan = queryBuilder().sql("SELECT rid FROM hive.`%s`", table).explainText();
+    assertThat(plan, containsString("HiveDrillNativeParquetScan"));
+  }
 
+  private void checkIntArrayInTable(String tableName) throws Exception {
     // Nesting 0: reading ARRAY<INT>
     testBuilder()
-        .sqlQuery("SELECT arr_n_0 FROM hive.`int_array`")
+        .sqlQuery("SELECT arr_n_0 FROM hive.`%s`", tableName)
         .unOrdered()
         .baselineColumns("arr_n_0")
-        .baselineValuesForSingleColumn(asList(-1, 0, 1))
-        .baselineValuesForSingleColumn(emptyList())
-        .baselineValuesForSingleColumn(Collections.singletonList(100500))
+        .baselineValues(asList(-1, 0, 1))
+        .baselineValues(emptyList())
+        .baselineValues(asList(100500))
         .go();
 
     // Nesting 1: reading ARRAY<ARRAY<INT>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_1 FROM hive.`int_array`")
+        .sqlQuery("SELECT arr_n_1 FROM hive.`%s`", tableName)
         .unOrdered()
         .baselineColumns("arr_n_1")
-        .baselineValuesForSingleColumn(asList(asList(-1, 0, 1), asList(-2, 1)))
-        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
-        .baselineValuesForSingleColumn(asList(asList(100500, 500100)))
+        .baselineValues(asList(asList(-1, 0, 1), asList(-2, 1)))
+        .baselineValues(asList(emptyList(), emptyList()))
+        .baselineValues(asList(asList(100500, 500100)))
         .go();
 
     // Nesting 2: reading ARRAY<ARRAY<ARRAY<INT>>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_2 FROM hive.`int_array` order by rid")
+        .sqlQuery("SELECT arr_n_2 FROM hive.`%s` order by rid", tableName)
         .ordered()
         .baselineColumns("arr_n_2")
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(7, 81),//[0][0]
-                    asList(-92, 54, -83),//[0][1]
-                    asList(-10, -59)//[0][2]
-                ),
-                asList( // [1]
-                    asList(-43, -80)//[1][0]
-                ),
-                asList( // [2]
-                    asList(-70, -62)//[2][0]
-                )
-            )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(34, -18)//[0][0]
-                ),
-                asList( // [1]
-                    asList(-87, 87),//[1][0]
-                    asList(52, 58),//[1][1]
-                    asList(58, 20, -81),//[1][2]
-                    asList(-94, -93)//[1][3]
-                )
-            )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(-56, 9),//[0][0]
-                    asList(39, 5)//[0][1]
-                ),
-                asList( // [1]
-                    asList(28, 88, -28)//[1][0]
-                )
-            )
-        ).go();
+        .baselineValues(asList(
+            asList(asList(7, 81), asList(-92, 54, -83), asList(-10, -59)),
+            asList(asList(-43, -80)),
+            asList(asList(-70, -62))
+        ))
+        .baselineValues(asList(
+            asList(asList(34, -18)),
+            asList(asList(-87, 87), asList(52, 58), asList(58, 20, -81), asList(-94, -93))
+        ))
+        .baselineValues(asList(
+            asList(asList(-56, 9), asList(39, 5)),
+            asList(asList(28, 88, -28))
+        ))
+        .go();
   }
 
   @Test
@@ -374,7 +326,7 @@ public class TestHiveArrays extends ClusterTest {
         .sqlQuery("DESCRIBE hive.`int_array` arr_n_0")
         .unOrdered()
         .baselineColumns("COLUMN_NAME", "DATA_TYPE", "IS_NULLABLE")
-        .baselineValues("arr_n_0", "ARRAY", "YES") //todo: fix to ARRAY<INTEGER>
+        .baselineValues("arr_n_0", "ARRAY", "YES")//todo: fix to ARRAY<INTEGER>
         .go();
     testBuilder()
         .sqlQuery("DESCRIBE hive.`int_array` arr_n_1")
@@ -411,101 +363,64 @@ public class TestHiveArrays extends ClusterTest {
 
   @Test
   public void stringArray() throws Exception {
+    checkStringArrayInTable("string_array");
+  }
+
+  @Test
+  public void stringArrayParquet() throws Exception {
+    checkNativeScanUsed("string_array_p");
+    checkStringArrayInTable("string_array_p");
+  }
+
+  private void checkStringArrayInTable(String table) throws Exception {
     // Nesting 0: reading ARRAY<STRING>
     testBuilder()
-        .sqlQuery("SELECT arr_n_0 FROM hive.`string_array`")
+        .sqlQuery("SELECT arr_n_0 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_0")
-        .baselineValuesForSingleColumn(asList(new Text("First Value Of Array"), new Text("komlnp"), new Text("The Last Value")))
-        .baselineValuesForSingleColumn(emptyList())
-        .baselineValuesForSingleColumn(Collections.singletonList(new Text("ABCaBcA-1-2-3")))
+        .baselineValues(asTextList("First Value Of Array", "komlnp", "The Last Value"))
+        .baselineValues(emptyList())
+        .baselineValues(asTextList("ABCaBcA-1-2-3"))
         .go();
 
     // Nesting 1: reading ARRAY<ARRAY<STRING>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_1 FROM hive.`string_array`")
+        .sqlQuery("SELECT arr_n_1 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_1")
-        .baselineValuesForSingleColumn(asList(asList(new Text("Array 0, Value 0"), new Text("Array 0, Value 1")), asList(new Text("Array 1"))))
-        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
-        .baselineValuesForSingleColumn(asList(asList(new Text("One"))))
+        .baselineValues(asList(asTextList("Array 0, Value 0", "Array 0, Value 1"), asTextList("Array 1")))
+        .baselineValues(asList(emptyList(), emptyList()))
+        .baselineValues(asList(asTextList("One")))
         .go();
 
     // Nesting 2: reading ARRAY<ARRAY<ARRAY<STRING>>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_2 FROM hive.`string_array` order by rid")
+        .sqlQuery("SELECT arr_n_2 FROM hive.`%s` order by rid", table)
         .ordered()
         .baselineColumns("arr_n_2")
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(new Text("dhMGOr1QVO"), new Text("NZpzBl"), new Text("LC8mjYyOJ7l8dHUpk"))//[0][0]
-                ),
-                asList( // [1]
-                    asList(new Text("JH")),//[1][0]
-                    asList(new Text("aVxgfxAu")),//[1][1]
-                    asList(new Text("fF amN8z8"))//[1][2]
-                ),
-                asList( // [2]
-                    asList(new Text("denwte5R39dSb2PeG"), new Text("Gbosj97RXTvBK1w"), new Text("S3whFvN")),//[2][0]
-                    asList(new Text("2sNbYGQhkt303Gnu"), new Text("rwG"), new Text("SQH766A8XwHg2pTA6a"))//[2][1]
-                ),
-                asList( // [3]
-                    asList(new Text("L"), new Text("khGFDtDluFNoo5hT")),//[3][0]
-                    asList(new Text("b8")),//[3][1]
-                    asList(new Text("Z"))//[3][2]
-                ),
-                asList( // [4]
-                    asList(new Text("DTEuW"), new Text("b0Wt84hIl"), new Text("A1H")),//[4][0]
-                    asList(new Text("h2zXh3Qc"), new Text("NOcgU8"), new Text("RGfVgv2rvDG")),//[4][1]
-                    asList(new Text("Hfn1ov9hB7fZN"), new Text("0ZgCD3"))//[4][2]
-                )
-            )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(new Text("nk"), new Text("HA"), new Text("CgAZCxTbTrFWJL3yM")),//[0][0]
-                    asList(new Text("T7fGXYwtBb"), new Text("G6vc")),//[0][1]
-                    asList(new Text("GrwB5j3LBy9")),//[0][2]
-                    asList(new Text("g7UreegD1H97"), new Text("dniQ5Ehhps7c1pBuM"), new Text("S wSNMGj7c")),//[0][3]
-                    asList(new Text("iWTEJS0"), new Text("4F"))//[0][4]
-                ),
-                asList( // [1]
-                    asList(new Text("YpRcC01u6i6KO"), new Text("ujpMrvEfUWfKm"), new Text("2d")),//[1][0]
-                    asList(new Text("2"), new Text("HVDH"), new Text("5Qx Q6W112"))//[1][1]
-                )
-            )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(new Text("S8d2vjNu680hSim6iJ")),//[0][0]
-                    asList(new Text("lRLaT9RvvgzhZ3C"), new Text("igSX1CP"), new Text("FFZMwMvAOod8")),//[0][1]
-                    asList(new Text("iBX"), new Text("sG")),//[0][2]
-                    asList(new Text("ChRjuDPz99WeU9"), new Text("2gBBmMUXV9E5E"), new Text(" VkEARI2upO"))//[0][3]
-                ),
-                asList( // [1]
-                    asList(new Text("UgMok3Q5wmd")),//[1][0]
-                    asList(new Text("8Zf9CLfUSWK"), new Text(""), new Text("NZ7v")),//[1][1]
-                    asList(new Text("vQE3I5t26"), new Text("251BeQJue"))//[1][2]
-                ),
-                asList( // [2]
-                    asList(new Text("Rpo8"))//[2][0]
-                ),
-                asList( // [3]
-                    asList(new Text("jj3njyupewOM Ej0pu"), new Text("aePLtGgtyu4aJ5"), new Text("cKHSvNbImH1MkQmw0Cs")),//[3][0]
-                    asList(new Text("VSO5JgI2x7TnK31L5"), new Text("hIub"), new Text("eoBSa0zUFlwroSucU")),//[3][1]
-                    asList(new Text("V8Gny91lT"), new Text("5hBncDZ"))//[3][2]
-                ),
-                asList( // [4]
-                    asList(new Text("Y3"), new Text("StcgywfU"), new Text("BFTDChc")),//[4][0]
-                    asList(new Text("5JNwXc2UHLld7"), new Text("v")),//[4][1]
-                    asList(new Text("9UwBhJMSDftPKuGC")),//[4][2]
-                    asList(new Text("E hQ9NJkc0GcMlB"), new Text("IVND1Xp1Nnw26DrL9"))//[4][3]
-                )
-            )
-        ).go();
+        .baselineValues(asList(
+            asList(asTextList("dhMGOr1QVO", "NZpzBl", "LC8mjYyOJ7l8dHUpk")),
+            asList(asTextList("JH"), asTextList("aVxgfxAu"), asTextList("fF amN8z8")),
+            asList(asTextList("denwte5R39dSb2PeG", "Gbosj97RXTvBK1w", "S3whFvN"), asTextList("2sNbYGQhkt303Gnu", "rwG", "SQH766A8XwHg2pTA6a")),
+            asList(asTextList("L", "khGFDtDluFNoo5hT"), asTextList("b8"), asTextList("Z")),
+            asList(asTextList("DTEuW", "b0Wt84hIl", "A1H"), asTextList("h2zXh3Qc", "NOcgU8", "RGfVgv2rvDG"), asTextList("Hfn1ov9hB7fZN", "0ZgCD3"))
+        ))
+        .baselineValues(asList(
+            asList(asTextList("nk", "HA", "CgAZCxTbTrFWJL3yM"), asTextList("T7fGXYwtBb", "G6vc"), asTextList("GrwB5j3LBy9"),
+                asTextList("g7UreegD1H97", "dniQ5Ehhps7c1pBuM", "S wSNMGj7c"), asTextList("iWTEJS0", "4F")),
+            asList(asTextList("YpRcC01u6i6KO", "ujpMrvEfUWfKm", "2d"), asTextList("2", "HVDH", "5Qx Q6W112"))
+        ))
+        .baselineValues(asList(
+            asList(asTextList("S8d2vjNu680hSim6iJ"), asTextList("lRLaT9RvvgzhZ3C", "igSX1CP", "FFZMwMvAOod8"),
+                asTextList("iBX", "sG"), asTextList("ChRjuDPz99WeU9", "2gBBmMUXV9E5E", " VkEARI2upO")),
+            asList(asTextList("UgMok3Q5wmd"), asTextList("8Zf9CLfUSWK", "", "NZ7v"), asTextList("vQE3I5t26", "251BeQJue")),
+            asList(asTextList("Rpo8")),
+            asList(asTextList("jj3njyupewOM Ej0pu", "aePLtGgtyu4aJ5", "cKHSvNbImH1MkQmw0Cs"), asTextList("VSO5JgI2x7TnK31L5", "hIub", "eoBSa0zUFlwroSucU"),
+                asTextList("V8Gny91lT", "5hBncDZ")),
+            asList(asTextList("Y3", "StcgywfU", "BFTDChc"), asTextList("5JNwXc2UHLld7", "v"), asTextList("9UwBhJMSDftPKuGC"),
+                asTextList("E hQ9NJkc0GcMlB", "IVND1Xp1Nnw26DrL9"))
+        ))
+        .go();
   }
 
   @Test
@@ -514,35 +429,10 @@ public class TestHiveArrays extends ClusterTest {
     testBuilder()
         .sqlQuery("SELECT arr_n_0[0], arr_n_0[1], arr_n_1[0], arr_n_1[1], arr_n_0[3], arr_n_1[3] FROM hive.`string_array`")
         .unOrdered()
-        .baselineColumns(
-            "EXPR$0",
-            "EXPR$1",
-            "EXPR$2",
-            "EXPR$3",
-            "EXPR$4",
-            "EXPR$5")
-        .baselineValues(
-            "First Value Of Array",
-            "komlnp",
-            asList(new Text("Array 0, Value 0"), new Text("Array 0, Value 1")),
-            asList(new Text("Array 1")),
-            null,
-            emptyList()
-        )
-        .baselineValues(
-            null,
-            null,
-            emptyList(),
-            emptyList(),
-            null,
-            emptyList())
-        .baselineValues(
-            "ABCaBcA-1-2-3",
-            null,
-            asList(new Text("One")),
-            emptyList(),
-            null,
-            emptyList())
+        .baselineColumns("EXPR$0", "EXPR$1", "EXPR$2", "EXPR$3", "EXPR$4", "EXPR$5")
+        .baselineValues("First Value Of Array", "komlnp", asTextList("Array 0, Value 0", "Array 0, Value 1"), asTextList("Array 1"), null, emptyList())
+        .baselineValues(null, null, emptyList(), emptyList(), null, emptyList())
+        .baselineValues("ABCaBcA-1-2-3", null, asTextList("One"), emptyList(), null, emptyList())
         .go();
   }
 
@@ -553,9 +443,9 @@ public class TestHiveArrays extends ClusterTest {
         .sqlQuery("SELECT arr_n_0 FROM hive.`varchar_array`")
         .unOrdered()
         .baselineColumns("arr_n_0")
-        .baselineValuesForSingleColumn(asList(new Text("Five"), new Text("One"), new Text("T")))
-        .baselineValuesForSingleColumn(emptyList())
-        .baselineValuesForSingleColumn(asList(new Text("ZZ0"), new Text("-c54g"), new Text("ooo"), new Text("k22k")))
+        .baselineValues(asTextList("Five", "One", "T"))
+        .baselineValues(emptyList())
+        .baselineValues(asTextList("ZZ0", "-c54g", "ooo", "k22k"))
         .go();
 
     // Nesting 1: reading ARRAY<ARRAY<VARCHAR(5)>>
@@ -563,12 +453,9 @@ public class TestHiveArrays extends ClusterTest {
         .sqlQuery("SELECT arr_n_1 FROM hive.`varchar_array`")
         .unOrdered()
         .baselineColumns("arr_n_1")
-        .baselineValuesForSingleColumn(asList(
-            asList(new Text("Five"), new Text("One"), new Text("$42")),
-            asList(new Text("T"), new Text("K"), new Text("O"))
-        ))
-        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
-        .baselineValuesForSingleColumn(asList(asList(new Text("-c54g"))))
+        .baselineValues(asList(asTextList("Five", "One", "$42"), asTextList("T", "K", "O")))
+        .baselineValues(asList(emptyList(), emptyList()))
+        .baselineValues(asList(asTextList("-c54g")))
         .go();
 
     // Nesting 2: reading ARRAY<ARRAY<ARRAY<VARCHAR(5)>>>
@@ -576,58 +463,21 @@ public class TestHiveArrays extends ClusterTest {
         .sqlQuery("SELECT arr_n_2 FROM hive.`varchar_array` order by rid")
         .ordered()
         .baselineColumns("arr_n_2")
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(new Text("")),//[0][0]
-                    asList(new Text("Gt"), new Text(""), new Text("")),//[0][1]
-                    asList(new Text("9R3y")),//[0][2]
-                    asList(new Text("X3a4"))//[0][3]
-                ),
-                asList( // [1]
-                    asList(new Text("o"), new Text("6T"), new Text("QKAZ")),//[1][0]
-                    asList(new Text(""), new Text("xf8r"), new Text("As")),//[1][1]
-                    asList(new Text("5kS3"))//[1][2]
-                ),
-                asList( // [2]
-                    asList(new Text(""), new Text("S7Gx")),//[2][0]
-                    asList(new Text("ml"), new Text("27pL"), new Text("VPxr")),//[2][1]
-                    asList(new Text("")),//[2][2]
-                    asList(new Text("e"), new Text("Dj"))//[2][3]
-                ),
-                asList( // [3]
-                    asList(new Text(""), new Text("XYO"), new Text("fEWz")),//[3][0]
-                    asList(new Text(""), new Text("oU")),//[3][1]
-                    asList(new Text("o 8"), new Text(""), new Text("")),//[3][2]
-                    asList(new Text("giML"), new Text("H7g")),//[3][3]
-                    asList(new Text("SWX9"), new Text("H"), new Text("emwt"))//[3][4]
-                ),
-                asList( // [4]
-                    asList(new Text("Sp"))//[4][0]
-                )
-            )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(new Text("GCx")),//[0][0]
-                    asList(new Text(""), new Text("V")),//[0][1]
-                    asList(new Text("pF"), new Text("R7"), new Text("")),//[0][2]
-                    asList(new Text(""), new Text("AKal"))//[0][3]
-                )
-            )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(new Text("m"), new Text("MBAv"), new Text("7R9F")),//[0][0]
-                    asList(new Text("ovv")),//[0][1]
-                    asList(new Text("p 7l"))//[0][2]
-                )
-            )
-        )
+        .baselineValues(asList(
+            asList(asTextList(""), asTextList("Gt", "", ""), asTextList("9R3y"), asTextList("X3a4")),
+            asList(asTextList("o", "6T", "QKAZ"), asTextList("", "xf8r", "As"), asTextList("5kS3")),
+            asList(asTextList("", "S7Gx"), asTextList("ml", "27pL", "VPxr"), asTextList(""), asTextList("e", "Dj")),
+            asList(asTextList("", "XYO", "fEWz"), asTextList("", "oU"), asTextList("o 8", "", ""),
+                asTextList("giML", "H7g"), asTextList("SWX9", "H", "emwt")),
+            asList(asTextList("Sp"))
+        ))
+        .baselineValues(asList(
+            asList(asTextList("GCx"), asTextList("", "V"), asTextList("pF", "R7", ""), asTextList("", "AKal"))
+        ))
+        .baselineValues(asList(
+            asList(asTextList("m", "MBAv", "7R9F"), asTextList("ovv"), asTextList("p 7l"))
+        ))
         .go();
-
   }
 
   @Test
@@ -637,98 +487,61 @@ public class TestHiveArrays extends ClusterTest {
         .sqlQuery("SELECT arr_n_0[0], arr_n_0[1], arr_n_1[0], arr_n_1[1], arr_n_0[3], arr_n_1[3] FROM hive.`varchar_array`")
         .unOrdered()
         .baselineColumns("EXPR$0", "EXPR$1", "EXPR$2", "EXPR$3", "EXPR$4", "EXPR$5")
-        .baselineValues(
-            "Five",
-            "One",
-            asList(new Text("Five"), new Text("One"), new Text("$42")),
-            asList(new Text("T"), new Text("K"), new Text("O")),
-            null,
-            emptyList())
-        .baselineValues(
-            null,
-            null,
-            emptyList(),
-            emptyList(),
-            null,
-            emptyList())
-        .baselineValues(
-            "ZZ0",
-            "-c54g",
-            asList(new Text("-c54g")),
-            emptyList(),
-            "k22k",
-            emptyList())
+        .baselineValues("Five", "One", asTextList("Five", "One", "$42"), asTextList("T", "K", "O"), null, emptyList())
+        .baselineValues(null, null, emptyList(), emptyList(), null, emptyList())
+        .baselineValues("ZZ0", "-c54g", asTextList("-c54g"), emptyList(), "k22k", emptyList())
         .go();
   }
 
   @Test
   public void charArray() throws Exception {
+    checkCharArrayInTable("char_array");
+  }
+
+  @Test
+  public void charArrayParquet() throws Exception {
+    checkNativeScanUsed("char_array_p");
+    checkCharArrayInTable("char_array_p");
+  }
+
+  private void checkCharArrayInTable(String table) throws Exception {
     // Nesting 0: reading ARRAY<CHAR(2)>
     testBuilder()
-        .sqlQuery("SELECT arr_n_0 FROM hive.`char_array`")
+        .sqlQuery("SELECT arr_n_0 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_0")
-        .baselineValuesForSingleColumn(asList(new Text("aa"), new Text("cc"), new Text("ot")))
-        .baselineValuesForSingleColumn(emptyList())
-        .baselineValuesForSingleColumn(asList(new Text("+a"), new Text("-c"), new Text("*t")))
+        .baselineValues(asTextList("aa", "cc", "ot"))
+        .baselineValues(emptyList())
+        .baselineValues(asTextList("+a", "-c", "*t"))
         .go();
 
     // Nesting 1: reading ARRAY<ARRAY<CHAR(2)>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_1 FROM hive.`char_array`")
+        .sqlQuery("SELECT arr_n_1 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_1")
-        .baselineValuesForSingleColumn(asList(
-            asList(new Text("aa")),
-            asList(new Text("cc"), new Text("ot"))))
-        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
-        .baselineValuesForSingleColumn(asList(asList(new Text("*t"))))
+        .baselineValues(asList(asTextList("aa"), asTextList("cc", "ot")))
+        .baselineValues(asList(emptyList(), emptyList()))
+        .baselineValues(asList(asTextList("*t")))
         .go();
 
     // Nesting 2: reading ARRAY<ARRAY<ARRAY<CHAR(2)>>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_2 FROM hive.`char_array` order by rid")
+        .sqlQuery("SELECT arr_n_2 FROM hive.`%s` order by rid", table)
         .ordered()
         .baselineColumns("arr_n_2")
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(new Text("eT"))//[0][0]
-                ),
-                asList( // [1]
-                    asList(new Text("w9"), new Text("fC"), new Text("ww")),//[1][0]
-                    asList(new Text("3o"), new Text("f7"), new Text("Za")),//[1][1]
-                    asList(new Text("lX"), new Text("iv"), new Text("jI"))//[1][2]
-                ),
-                asList( // [2]
-                    asList(new Text("S3"), new Text("Qa"), new Text("aG")),//[2][0]
-                    asList(new Text("bj"), new Text("gc"), new Text("NO"))//[2][1]
-                )
-            )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(new Text("PV"), new Text("tH"), new Text("B7")),//[0][0]
-                    asList(new Text("uL")),//[0][1]
-                    asList(new Text("7b"), new Text("uf")),//[0][2]
-                    asList(new Text("zj")),//[0][3]
-                    asList(new Text("sA"), new Text("hf"), new Text("hR"))//[0][4]
-                )
-            )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(new Text("W1"), new Text("FS")),//[0][0]
-                    asList(new Text("le"), new Text("c0")),//[0][1]
-                    asList(new Text(""), new Text("0v"))//[0][2]
-                ),
-                asList( // [1]
-                    asList(new Text("gj"))//[1][0]
-                )
-            )
-        )
+        .baselineValues(asList(
+            asList(asTextList("eT")),
+            asList(asTextList("w9", "fC", "ww"), asTextList("3o", "f7", "Za"), asTextList("lX", "iv", "jI")),
+            asList(asTextList("S3", "Qa", "aG"), asTextList("bj", "gc", "NO"))
+        ))
+        .baselineValues(asList(
+            asList(asTextList("PV", "tH", "B7"), asTextList("uL"), asTextList("7b", "uf"), asTextList("zj"), asTextList("sA", "hf", "hR"))
+        ))
+        .baselineValues(asList(
+            asList(asTextList("W1", "FS"), asTextList("le", "c0"), asTextList("", "0v")),
+            asList(asTextList("gj"))
+        ))
         .go();
   }
 
@@ -739,117 +552,65 @@ public class TestHiveArrays extends ClusterTest {
         .sqlQuery("SELECT arr_n_0[0], arr_n_0[1], arr_n_1[0], arr_n_1[1], arr_n_0[3], arr_n_1[3] FROM hive.`char_array`")
         .unOrdered()
         .baselineColumns("EXPR$0", "EXPR$1", "EXPR$2", "EXPR$3", "EXPR$4", "EXPR$5")
-        .baselineValues(
-            "aa",
-            "cc",
-            asList(new Text("aa")),
-            asList(new Text("cc"), new Text("ot")),
-            null,
-            emptyList())
-        .baselineValues(
-            null,
-            null,
-            emptyList(),
-            emptyList(),
-            null,
-            emptyList())
-        .baselineValues(
-            "+a",
-            "-c",
-            asList(new Text("*t")),
-            emptyList(),
-            null,
-            emptyList())
+        .baselineValues("aa", "cc", asTextList("aa"), asTextList("cc", "ot"), null, emptyList())
+        .baselineValues(null, null, emptyList(), emptyList(), null, emptyList())
+        .baselineValues("+a", "-c", asTextList("*t"), emptyList(), null, emptyList())
         .go();
   }
 
   @Test
   public void tinyintArray() throws Exception {
+    checkTinyintArrayInTable("tinyint_array");
+  }
+
+  @Test
+  public void tinyintArrayParquet() throws Exception {
+    checkNativeScanUsed("tinyint_array_p");
+    checkTinyintArrayInTable("tinyint_array_p");
+  }
+
+  private void checkTinyintArrayInTable(String table) throws Exception {
     // Nesting 0: reading ARRAY<TINYINT>
     testBuilder()
-        .sqlQuery("SELECT arr_n_0 FROM hive.`tinyint_array`")
+        .sqlQuery("SELECT arr_n_0 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_0")
-        .baselineValuesForSingleColumn(asList(-128, 0, 127))
-        .baselineValuesForSingleColumn(emptyList())
-        .baselineValuesForSingleColumn(asList(-101))
+        .baselineValues(asList(-128, 0, 127))
+        .baselineValues(emptyList())
+        .baselineValues(asList(-101))
         .go();
 
     // Nesting 1: reading ARRAY<ARRAY<TINYINT>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_1 FROM hive.`tinyint_array`")
+        .sqlQuery("SELECT arr_n_1 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_1")
-        .baselineValuesForSingleColumn(asList(asList(-128, -127), asList(0, 1), asList(127, 126)))
-        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
-        .baselineValuesForSingleColumn(asList(asList(-102)))
+        .baselineValues(asList(asList(-128, -127), asList(0, 1), asList(127, 126)))
+        .baselineValues(asList(emptyList(), emptyList()))
+        .baselineValues(asList(asList(-102)))
         .go();
 
     // Nesting 2: reading ARRAY<ARRAY<ARRAY<TINYINT>>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_2 FROM hive.`tinyint_array` order by rid")
+        .sqlQuery("SELECT arr_n_2 FROM hive.`%s` order by rid", table)
         .ordered()
         .baselineColumns("arr_n_2")
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(31, 65, 54),//[0][0]
-                    asList(66),//[0][1]
-                    asList(22),//[0][2]
-                    asList(-33, -125, 116)//[0][3]
-                ),
-                asList( // [1]
-                    asList(-5, -10)//[1][0]
-                ),
-                asList( // [2]
-                    asList(78),//[2][0]
-                    asList(86),//[2][1]
-                    asList(90, 34),//[2][2]
-                    asList(32)//[2][3]
-                ),
-                asList( // [3]
-                    asList(103, -49, -33),//[3][0]
-                    asList(-30),//[3][1]
-                    asList(107, 24, 74),//[3][2]
-                    asList(16, -58)//[3][3]
-                ),
-                asList( // [4]
-                    asList(-119, -8),//[4][0]
-                    asList(50, -99, 26),//[4][1]
-                    asList(-119)//[4][2]
-                )
-            )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(-90, -113),//[0][0]
-                    asList(71, -65)//[0][1]
-                ),
-                asList( // [1]
-                    asList(88, -83)//[1][0]
-                ),
-                asList( // [2]
-                    asList(11),//[2][0]
-                    asList(121, -57)//[2][1]
-                ),
-                asList( // [3]
-                    asList(-79),//[3][0]
-                    asList(16, -111, -111),//[3][1]
-                    asList(90, 106),//[3][2]
-                    asList(33, 29, 42),//[3][3]
-                    asList(74)//[3][4]
-                )
-            )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(74, -115),//[0][0]
-                    asList(19, 85, 3)//[0][1]
-                )
-            )
-        )
+        .baselineValues(asList(
+            asList(asList(31, 65, 54), asList(66), asList(22), asList(-33, -125, 116)),
+            asList(asList(-5, -10)),
+            asList(asList(78), asList(86), asList(90, 34), asList(32)),
+            asList(asList(103, -49, -33), asList(-30), asList(107, 24, 74), asList(16, -58)),
+            asList(asList(-119, -8), asList(50, -99, 26), asList(-119))
+        ))
+        .baselineValues(asList(
+            asList(asList(-90, -113), asList(71, -65)),
+            asList(asList(88, -83)),
+            asList(asList(11), asList(121, -57)),
+            asList(asList(-79), asList(16, -111, -111), asList(90, 106), asList(33, 29, 42), asList(74))
+        ))
+        .baselineValues(asList(
+            asList(asList(74, -115), asList(19, 85, 3))
+        ))
         .go();
   }
 
@@ -868,471 +629,369 @@ public class TestHiveArrays extends ClusterTest {
 
   @Test
   public void smallintArray() throws Exception {
+    checkSmallintArrayInTable("smallint_array");
+  }
+
+  @Test
+  public void smallintArrayParquet() throws Exception {
+    checkNativeScanUsed("smallint_array_p");
+    checkSmallintArrayInTable("smallint_array_p");
+  }
+
+  private void checkSmallintArrayInTable(String table) throws Exception {
     // Nesting 0: reading ARRAY<SMALLINT>
     testBuilder()
-        .sqlQuery("SELECT arr_n_0 FROM hive.`smallint_array`")
+        .sqlQuery("SELECT arr_n_0 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_0")
-        .baselineValuesForSingleColumn(asList(-32768, 0, 32767))
-        .baselineValuesForSingleColumn(emptyList())
-        .baselineValuesForSingleColumn(asList(10500))
+        .baselineValues(asList(-32768, 0, 32767))
+        .baselineValues(emptyList())
+        .baselineValues(asList(10500))
         .go();
 
     // Nesting 1: reading ARRAY<ARRAY<SMALLINT>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_1 FROM hive.`smallint_array`")
+        .sqlQuery("SELECT arr_n_1 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_1")
-        .baselineValuesForSingleColumn(asList(asList(-32768, -32768), asList(0, 0), asList(32767, 32767)))
-        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
-        .baselineValuesForSingleColumn(asList(asList(10500, 5010)))
+        .baselineValues(asList(asList(-32768, -32768), asList(0, 0), asList(32767, 32767)))
+        .baselineValues(asList(emptyList(), emptyList()))
+        .baselineValues(asList(asList(10500, 5010)))
         .go();
 
     // Nesting 2: reading ARRAY<ARRAY<ARRAY<SMALLINT>>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_2 FROM hive.`smallint_array` order by rid")
+        .sqlQuery("SELECT arr_n_2 FROM hive.`%s` order by rid", table)
         .ordered()
         .baselineColumns("arr_n_2")
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(-28752)//[0][0]
-                ),
-                asList( // [1]
-                    asList(17243, 15652),//[1][0]
-                    asList(-9684),//[1][1]
-                    asList(10176, 18123),//[1][2]
-                    asList(-15404, 15420),//[1][3]
-                    asList(11136, -19435)//[1][4]
-                ),
-                asList( // [2]
-                    asList(-29634, -12695),//[2][0]
-                    asList(4350, -24289, -10889)//[2][1]
-                ),
-                asList( // [3]
-                    asList(13731),//[3][0]
-                    asList(27661, -15794, 21784),//[3][1]
-                    asList(14341, -4635),//[3][2]
-                    asList(1601, -29973),//[3][3]
-                    asList(2750, 30373, -11630)//[3][4]
-                ),
-                asList( // [4]
-                    asList(-11383)//[4][0]
-                )
-            )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(23860),//[0][0]
-                    asList(-27345, 19068),//[0][1]
-                    asList(-7174, 286, 14673)//[0][2]
-                ),
-                asList( // [1]
-                    asList(14844, -9087),//[1][0]
-                    asList(-25185, 219),//[1][1]
-                    asList(26875),//[1][2]
-                    asList(-4699),//[1][3]
-                    asList(-3853, -15729, 11472)//[1][4]
-                ),
-                asList( // [2]
-                    asList(-29142),//[2][0]
-                    asList(-13859),//[2][1]
-                    asList(-23073, 31368, -26542)//[2][2]
-                ),
-                asList( // [3]
-                    asList(14914, 14656),//[3][0]
-                    asList(4636, 6289)//[3][1]
-                )
-            )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(10426, 31865),//[0][0]
-                    asList(-19088),//[0][1]
-                    asList(-4774),//[0][2]
-                    asList(17988)//[0][3]
-                ),
-                asList( // [1]
-                    asList(-6214, -26836, 30715)//[1][0]
-                ),
-                asList( // [2]
-                    asList(-4231),//[2][0]
-                    asList(31742, -661),//[2][1]
-                    asList(-22842, 4203),//[2][2]
-                    asList(18278)//[2][3]
-                )
-            )
-        )
+        .baselineValues(asList(
+            asList(asList(-28752)),
+            asList(asList(17243, 15652), asList(-9684), asList(10176, 18123), asList(-15404, 15420), asList(11136, -19435)),
+            asList(asList(-29634, -12695), asList(4350, -24289, -10889)),
+            asList(asList(13731), asList(27661, -15794, 21784), asList(14341, -4635), asList(1601, -29973), asList(2750, 30373, -11630)),
+            asList(asList(-11383))
+        ))
+        .baselineValues(asList(
+            asList(asList(23860), asList(-27345, 19068), asList(-7174, 286, 14673)),
+            asList(asList(14844, -9087), asList(-25185, 219), asList(26875), asList(-4699), asList(-3853, -15729, 11472)),
+            asList(asList(-29142), asList(-13859), asList(-23073, 31368, -26542)),
+            asList(asList(14914, 14656), asList(4636, 6289))
+        ))
+        .baselineValues(asList(
+            asList(asList(10426, 31865), asList(-19088), asList(-4774), asList(17988)),
+            asList(asList(-6214, -26836, 30715)),
+            asList(asList(-4231), asList(31742, -661), asList(-22842, 4203), asList(18278))
+        ))
         .go();
   }
 
   @Test
   public void decimalArray() throws Exception {
+    checkDecimalArrayInTable("decimal_array");
+  }
+
+  @Test
+  public void decimalArrayParquet() throws Exception {
+    checkNativeScanUsed("decimal_array_p");
+    checkDecimalArrayInTable("decimal_array_p");
+  }
+
+  private void checkDecimalArrayInTable(String table) throws Exception {
     // Nesting 0: reading ARRAY<DECIMAL(9,3)>
     testBuilder()
-        .sqlQuery("SELECT arr_n_0 FROM hive.`decimal_array`")
+        .sqlQuery("SELECT arr_n_0 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_0")
-        .baselineValuesForSingleColumn(asList(new BigDecimal("-100000.000"), new BigDecimal("102030.001"), new BigDecimal("0.001")))
-        .baselineValuesForSingleColumn(emptyList())
-        .baselineValuesForSingleColumn(Collections.singletonList(new BigDecimal("-10.500")))
+        .baselineValues(asList(new BigDecimal("-100000.000"), new BigDecimal("102030.001"), new BigDecimal("0.001")))
+        .baselineValues(emptyList())
+        .baselineValues(asList(new BigDecimal("-10.500")))
         .go();
 
     // Nesting 1: reading ARRAY<ARRAY<DECIMAL(9,3)>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_1 FROM hive.`decimal_array`")
+        .sqlQuery("SELECT arr_n_1 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_1")
-        .baselineValuesForSingleColumn(asList(
+        .baselineValues(asList(
             asList(new BigDecimal("-100000.000"), new BigDecimal("102030.001")),
             asList(new BigDecimal("0.101"), new BigDecimal("0.102")),
             asList(new BigDecimal("0.001"), new BigDecimal("327670.001"))))
-        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
-        .baselineValuesForSingleColumn(asList(asList(new BigDecimal("10.500"), new BigDecimal("5.010"))))
+        .baselineValues(asList(emptyList(), emptyList()))
+        .baselineValues(asList(asList(new BigDecimal("10.500"), new BigDecimal("5.010"))))
         .go();
 
     // Nesting 2: reading ARRAY<ARRAY<ARRAY<DECIMAL(9,3)>>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_2 FROM hive.`decimal_array` order by rid")
+        .sqlQuery("SELECT arr_n_2 FROM hive.`%s` order by rid", table)
         .ordered()
         .baselineColumns("arr_n_2")
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(new BigDecimal("9.453")),//[0][0]
-                    asList(new BigDecimal("8.233"), new BigDecimal("-146577.465")),//[0][1]
-                    asList(new BigDecimal("-911144.423"), new BigDecimal("-862766.866"), new BigDecimal("-129948.784"))//[0][2]
-                ),
-                asList( // [1]
-                    asList(new BigDecimal("931346.867"))//[1][0]
-                ),
-                asList( // [2]
-                    asList(new BigDecimal("81.750")),//[2][0]
-                    asList(new BigDecimal("587225.077"), new BigDecimal("-3.930")),//[2][1]
-                    asList(new BigDecimal("0.042")),//[2][2]
-                    asList(new BigDecimal("-342346.511"))//[2][3]
-                )
+        .baselineValues(asList( // row
+            asList( // [0]
+                asList(new BigDecimal("9.453")),//[0][0]
+                asList(new BigDecimal("8.233"), new BigDecimal("-146577.465")),//[0][1]
+                asList(new BigDecimal("-911144.423"), new BigDecimal("-862766.866"), new BigDecimal("-129948.784"))//[0][2]
+            ),
+            asList( // [1]
+                asList(new BigDecimal("931346.867"))//[1][0]
+            ),
+            asList( // [2]
+                asList(new BigDecimal("81.750")),//[2][0]
+                asList(new BigDecimal("587225.077"), new BigDecimal("-3.930")),//[2][1]
+                asList(new BigDecimal("0.042")),//[2][2]
+                asList(new BigDecimal("-342346.511"))//[2][3]
             )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(new BigDecimal("375098.406"), new BigDecimal("84.509")),//[0][0]
-                    asList(new BigDecimal("-446325.287"), new BigDecimal("3.671")),//[0][1]
-                    asList(new BigDecimal("286958.380"), new BigDecimal("314821.890"), new BigDecimal("18513.303")),//[0][2]
-                    asList(new BigDecimal("-444023.971"), new BigDecimal("827746.528"), new BigDecimal("-54.986")),//[0][3]
-                    asList(new BigDecimal("-44520.406"))//[0][4]
-                )
+        ))
+        .baselineValues(asList( // row
+            asList( // [0]
+                asList(new BigDecimal("375098.406"), new BigDecimal("84.509")),//[0][0]
+                asList(new BigDecimal("-446325.287"), new BigDecimal("3.671")),//[0][1]
+                asList(new BigDecimal("286958.380"), new BigDecimal("314821.890"), new BigDecimal("18513.303")),//[0][2]
+                asList(new BigDecimal("-444023.971"), new BigDecimal("827746.528"), new BigDecimal("-54.986")),//[0][3]
+                asList(new BigDecimal("-44520.406"))//[0][4]
             )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(new BigDecimal("906668.849"), new BigDecimal("1.406")),//[0][0]
-                    asList(new BigDecimal("-494177.333"), new BigDecimal("952997.058"))//[0][1]
-                ),
-                asList( // [1]
-                    asList(new BigDecimal("642385.159"), new BigDecimal("369753.830"), new BigDecimal("634889.981")),//[1][0]
-                    asList(new BigDecimal("83970.515"), new BigDecimal("-847315.758"), new BigDecimal("-0.600")),//[1][1]
-                    asList(new BigDecimal("73013.870")),//[1][2]
-                    asList(new BigDecimal("337872.675"), new BigDecimal("375940.114"), new BigDecimal("-2.670")),//[1][3]
-                    asList(new BigDecimal("-7.899"), new BigDecimal("755611.538"))//[1][4]
-                )
+        ))
+        .baselineValues(asList( // row
+            asList( // [0]
+                asList(new BigDecimal("906668.849"), new BigDecimal("1.406")),//[0][0]
+                asList(new BigDecimal("-494177.333"), new BigDecimal("952997.058"))//[0][1]
+            ),
+            asList( // [1]
+                asList(new BigDecimal("642385.159"), new BigDecimal("369753.830"), new BigDecimal("634889.981")),//[1][0]
+                asList(new BigDecimal("83970.515"), new BigDecimal("-847315.758"), new BigDecimal("-0.600")),//[1][1]
+                asList(new BigDecimal("73013.870")),//[1][2]
+                asList(new BigDecimal("337872.675"), new BigDecimal("375940.114"), new BigDecimal("-2.670")),//[1][3]
+                asList(new BigDecimal("-7.899"), new BigDecimal("755611.538"))//[1][4]
             )
-        )
+        ))
         .go();
   }
 
   @Test
   public void booleanArray() throws Exception {
+    checkBooleanArrayInTable("boolean_array");
+  }
+
+  @Test
+  public void booleanArrayParquet() throws Exception {
+    checkNativeScanUsed("boolean_array_p");
+    checkBooleanArrayInTable("boolean_array_p");
+  }
+
+  private void checkBooleanArrayInTable(String table) throws Exception {
     // Nesting 0: reading ARRAY<BOOLEAN>
     testBuilder()
-        .sqlQuery("SELECT arr_n_0 FROM hive.`boolean_array`")
+        .sqlQuery("SELECT arr_n_0 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_0")
-        .baselineValuesForSingleColumn(asList(false, true, false, true, false))
-        .baselineValuesForSingleColumn(emptyList())
-        .baselineValuesForSingleColumn(Collections.singletonList(true))
+        .baselineValues(asList(false, true, false, true, false))
+        .baselineValues(emptyList())
+        .baselineValues(Collections.singletonList(true))
         .go();
 
     // Nesting 1: reading ARRAY<ARRAY<BOOLEAN>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_1 FROM hive.`boolean_array`")
+        .sqlQuery("SELECT arr_n_1 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_1")
-        .baselineValuesForSingleColumn(asList(asList(true, false, true), asList(false, false)))
-        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
-        .baselineValuesForSingleColumn(asList(asList(false, true)))
+        .baselineValues(asList(asList(true, false, true), asList(false, false)))
+        .baselineValues(asList(emptyList(), emptyList()))
+        .baselineValues(asList(asList(false, true)))
         .go();
 
     // Nesting 2: reading ARRAY<ARRAY<ARRAY<BOOLEAN>>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_2 FROM hive.`boolean_array` order by rid")
+        .sqlQuery("SELECT arr_n_2 FROM hive.`%s` order by rid", table)
         .ordered()
         .baselineColumns("arr_n_2")
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(false, true)//[0][0]
-                ),
-                asList( // [1]
-                    asList(true),//[1][0]
-                    asList(false, true),//[1][1]
-                    asList(true),//[1][2]
-                    asList(true)//[1][3]
-                ),
-                asList( // [2]
-                    asList(false),//[2][0]
-                    asList(true, false, false),//[2][1]
-                    asList(true, true),//[2][2]
-                    asList(false, true, false)//[2][3]
-                ),
-                asList( // [3]
-                    asList(false, true),//[3][0]
-                    asList(true, false),//[3][1]
-                    asList(true, false, true)//[3][2]
-                ),
-                asList( // [4]
-                    asList(false),//[4][0]
-                    asList(false),//[4][1]
-                    asList(false)//[4][2]
-                )
-            )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(false, true),//[0][0]
-                    asList(false),//[0][1]
-                    asList(false, false),//[0][2]
-                    asList(true, true, true),//[0][3]
-                    asList(false)//[0][4]
-                ),
-                asList( // [1]
-                    asList(false, false, true)//[1][0]
-                ),
-                asList( // [2]
-                    asList(false, true),//[2][0]
-                    asList(true, false)//[2][1]
-                )
-            )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(true, true),//[0][0]
-                    asList(false, true, false),//[0][1]
-                    asList(true),//[0][2]
-                    asList(true, true, false)//[0][3]
-                ),
-                asList( // [1]
-                    asList(false),//[1][0]
-                    asList(false, true),//[1][1]
-                    asList(false),//[1][2]
-                    asList(false)//[1][3]
-                ),
-                asList( // [2]
-                    asList(true, true, true),//[2][0]
-                    asList(true, true, true),//[2][1]
-                    asList(false),//[2][2]
-                    asList(false)//[2][3]
-                ),
-                asList( // [3]
-                    asList(false, false)//[3][0]
-                )
-            )
-        )
+        .baselineValues(asList(
+            asList(asList(false, true)),
+            asList(asList(true), asList(false, true), asList(true), asList(true)),
+            asList(asList(false), asList(true, false, false), asList(true, true), asList(false, true, false)),
+            asList(asList(false, true), asList(true, false), asList(true, false, true)),
+            asList(asList(false), asList(false), asList(false))
+        ))
+        .baselineValues(asList(
+            asList(asList(false, true), asList(false), asList(false, false), asList(true, true, true), asList(false)),
+            asList(asList(false, false, true)),
+            asList(asList(false, true), asList(true, false))
+        ))
+        .baselineValues(asList(
+            asList(asList(true, true), asList(false, true, false), asList(true), asList(true, true, false)),
+            asList(asList(false), asList(false, true), asList(false), asList(false)),
+            asList(asList(true, true, true), asList(true, true, true), asList(false), asList(false)),
+            asList(asList(false, false))
+        ))
         .go();
   }
 
   @Test
   public void bigintArray() throws Exception {
+    checkBigintArrayInTable("bigint_array");
+  }
+
+  @Test
+  public void bigintArrayParquet() throws Exception {
+    checkNativeScanUsed("bigint_array_p");
+    checkBigintArrayInTable("bigint_array_p");
+  }
+
+  private void checkBigintArrayInTable(String table) throws Exception {
     // Nesting 0: reading ARRAY<BIGINT>
     testBuilder()
-        .sqlQuery("SELECT arr_n_0 FROM hive.`bigint_array`")
+        .sqlQuery("SELECT arr_n_0 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_0")
-        .baselineValuesForSingleColumn(asList(-9223372036854775808L, 0L, 10000000010L, 9223372036854775807L))
-        .baselineValuesForSingleColumn(emptyList())
-        .baselineValuesForSingleColumn(Collections.singletonList(10005000L))
+        .baselineValues(asList(-9223372036854775808L, 0L, 10000000010L, 9223372036854775807L))
+        .baselineValues(emptyList())
+        .baselineValues(asList(10005000L))
         .go();
 
     // Nesting 1: reading ARRAY<ARRAY<BIGINT>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_1 FROM hive.`bigint_array`")
+        .sqlQuery("SELECT arr_n_1 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_1")
-        .baselineValuesForSingleColumn(asList(
-            asList(-9223372036854775808L, 0L, 10000000010L),
-            asList(9223372036854775807L, 9223372036854775807L)))
-        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
-        .baselineValuesForSingleColumn(asList(asList(10005000L, 100050010L)))
+        .baselineValues(asList(asList(-9223372036854775808L, 0L, 10000000010L), asList(9223372036854775807L, 9223372036854775807L)))
+        .baselineValues(asList(emptyList(), emptyList()))
+        .baselineValues(asList(asList(10005000L, 100050010L)))
         .go();
 
     // Nesting 2: reading ARRAY<ARRAY<ARRAY<BIGINT>>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_2 FROM hive.`bigint_array` order by rid")
+        .sqlQuery("SELECT arr_n_2 FROM hive.`%s` order by rid", table)
         .ordered()
         .baselineColumns("arr_n_2")
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(7345032157033769004L),//[0][0]
-                    asList(-2306607274383855051L, 3656249581579032003L)//[0][1]
-                ),
-                asList( // [1]
-                    asList(6044100897358387146L, 4737705104728607904L)//[1][0]
-                )
+        .baselineValues(asList(
+            asList( // [0]
+                asList(7345032157033769004L),//[0][0]
+                asList(-2306607274383855051L, 3656249581579032003L)//[0][1]
+            ),
+            asList( // [1]
+                asList(6044100897358387146L, 4737705104728607904L)//[1][0]
             )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(4833583793282587107L, -8917877693351417844L, -3226305034926780974L)//[0][0]
-                )
+        ))
+        .baselineValues(asList(
+            asList( // [0]
+                asList(4833583793282587107L, -8917877693351417844L, -3226305034926780974L)//[0][0]
             )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(8679405200896733338L, 8581721713860760451L, 1150622751848016114L),//[0][0]
-                    asList(-6672104994192826124L, 4807952216371616134L),//[0][1]
-                    asList(-7874492057876324257L)//[0][2]
-                ),
-                asList( // [1]
-                    asList(8197656735200560038L),//[1][0]
-                    asList(7643173300425098029L, -3186442699228156213L, -8370345321491335247L),//[1][1]
-                    asList(8781633305391982544L, -7187468334864189662L)//[1][2]
-                ),
-                asList( // [2]
-                    asList(6685428436181310098L),//[2][0]
-                    asList(1358587806266610826L),//[2][1]
-                    asList(-2077124879355227614L, -6787493227661516341L),//[2][2]
-                    asList(3713296190482954025L, -3890396613053404789L),//[2][3]
-                    asList(4636761050236625699L, 5268453104977816600L)//[2][4]
-                )
+        ))
+        .baselineValues(asList(
+            asList( // [0]
+                asList(8679405200896733338L, 8581721713860760451L, 1150622751848016114L),//[0][0]
+                asList(-6672104994192826124L, 4807952216371616134L),//[0][1]
+                asList(-7874492057876324257L)//[0][2]
+            ),
+            asList( // [1]
+                asList(8197656735200560038L),//[1][0]
+                asList(7643173300425098029L, -3186442699228156213L, -8370345321491335247L),//[1][1]
+                asList(8781633305391982544L, -7187468334864189662L)//[1][2]
+            ),
+            asList( // [2]
+                asList(6685428436181310098L),//[2][0]
+                asList(1358587806266610826L),//[2][1]
+                asList(-2077124879355227614L, -6787493227661516341L),//[2][2]
+                asList(3713296190482954025L, -3890396613053404789L),//[2][3]
+                asList(4636761050236625699L, 5268453104977816600L)//[2][4]
             )
-        )
+        ))
         .go();
   }
 
   @Test
   public void floatArray() throws Exception {
+    checkFloatArrayInTable("float_array");
+  }
+
+  @Test
+  public void floatArrayParquet() throws Exception {
+    checkNativeScanUsed("float_array_p");
+    checkFloatArrayInTable("float_array_p");
+  }
+
+  private void checkFloatArrayInTable(String table) throws Exception {
     // Nesting 0: reading ARRAY<FLOAT>
     testBuilder()
-        .sqlQuery("SELECT arr_n_0 FROM hive.`float_array`")
+        .sqlQuery("SELECT arr_n_0 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_0")
-        .baselineValuesForSingleColumn(asList(-32.058f, 94.47389f, 16.107912f))
-        .baselineValuesForSingleColumn(emptyList())
-        .baselineValuesForSingleColumn(Collections.singletonList(25.96484f))
+        .baselineValues(asList(-32.058f, 94.47389f, 16.107912f))
+        .baselineValues(emptyList())
+        .baselineValues(Collections.singletonList(25.96484f))
         .go();
 
     // Nesting 1: reading ARRAY<ARRAY<FLOAT>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_1 FROM hive.`float_array`")
+        .sqlQuery("SELECT arr_n_1 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_1")
-        .baselineValuesForSingleColumn(asList(asList(-82.399826f, 12.633938f, 86.19402f), asList(-13.03544f, 64.65487f)))
-        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
-        .baselineValuesForSingleColumn(asList(asList(15.259451f, -15.259451f)))
+        .baselineValues(asList(asList(-82.399826f, 12.633938f, 86.19402f), asList(-13.03544f, 64.65487f)))
+        .baselineValues(asList(emptyList(), emptyList()))
+        .baselineValues(asList(asList(15.259451f, -15.259451f)))
         .go();
 
     // Nesting 2: reading ARRAY<ARRAY<ARRAY<FLOAT>>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_2 FROM hive.`float_array` order by rid")
+        .sqlQuery("SELECT arr_n_2 FROM hive.`%s` order by rid", table)
         .ordered()
         .baselineColumns("arr_n_2")
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(-5.6506114f),//[0][0]
-                    asList(26.546333f, 3724.8389f),//[0][1]
-                    asList(-53.65775f, 686.8335f, -0.99032f)//[0][2]
-                )
-            )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(29.042528f),//[0][0]
-                    asList(3524.3398f, -8856.58f, 6.8508215f)//[0][1]
-                ),
-                asList( // [1]
-                    asList(-0.73994386f, -2.0008986f),//[1][0]
-                    asList(-9.903006f, -271.26172f),//[1][1]
-                    asList(-131.80347f),//[1][2]
-                    asList(39.721367f, -4870.5444f),//[1][3]
-                    asList(-1.4830998f, -766.3066f, -0.1659732f)//[1][4]
-                ),
-                asList( // [2]
-                    asList(3467.0298f, -240.64255f),//[2][0]
-                    asList(2.4072556f, -85.89145f)//[2][1]
-                )
-            )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(-888.68243f, -38.09065f),//[0][0]
-                    asList(-6948.154f, -185.64319f, 0.7401936f),//[0][1]
-                    asList(-705.2718f, -932.4041f)//[0][2]
-                ),
-                asList( // [1]
-                    asList(-2.581712f, 0.28686252f, -0.98652786f),//[1][0]
-                    asList(-57.448563f, -0.0057083773f, -0.21712556f),//[1][1]
-                    asList(-8.076653f, -8149.519f, -7.5968184f),//[1][2]
-                    asList(8.823492f),//[1][3]
-                    asList(-9134.323f, 467.53275f, -59.763447f)//[1][4]
-                ),
-                asList( // [2]
-                    asList(0.33596575f, 6805.2256f, -3087.9531f),//[2][0]
-                    asList(9816.865f, -164.90712f, -1.9071647f)//[2][1]
-                ),
-                asList( // [3]
-                    asList(-0.23883149f),//[3][0]
-                    asList(-5.3763375f, -4.7661624f)//[3][1]
-                ),
-                asList( // [4]
-                    asList(-52.42167f, 247.91452f),//[4][0]
-                    asList(9499.771f),//[4][1]
-                    asList(-0.6549191f, 4340.83f)//[4][2]
-                )
-            )
-        )
+        .baselineValues(asList(
+            asList(asList(-5.6506114f), asList(26.546333f, 3724.8389f), asList(-53.65775f, 686.8335f, -0.99032f))
+        ))
+        .baselineValues(asList(
+            asList(asList(29.042528f), asList(3524.3398f, -8856.58f, 6.8508215f)),
+            asList(asList(-0.73994386f, -2.0008986f), asList(-9.903006f, -271.26172f), asList(-131.80347f),
+                asList(39.721367f, -4870.5444f), asList(-1.4830998f, -766.3066f, -0.1659732f)),
+            asList(asList(3467.0298f, -240.64255f), asList(2.4072556f, -85.89145f))
+        ))
+        .baselineValues(asList(
+            asList(asList(-888.68243f, -38.09065f), asList(-6948.154f, -185.64319f, 0.7401936f), asList(-705.2718f, -932.4041f)),
+            asList(asList(-2.581712f, 0.28686252f, -0.98652786f), asList(-57.448563f, -0.0057083773f, -0.21712556f),
+                asList(-8.076653f, -8149.519f, -7.5968184f), asList(8.823492f), asList(-9134.323f, 467.53275f, -59.763447f)),
+            asList(asList(0.33596575f, 6805.2256f, -3087.9531f), asList(9816.865f, -164.90712f, -1.9071647f)),
+            asList(asList(-0.23883149f), asList(-5.3763375f, -4.7661624f)),
+            asList(asList(-52.42167f, 247.91452f), asList(9499.771f), asList(-0.6549191f, 4340.83f))
+        ))
         .go();
   }
 
   @Test
   public void doubleArray() throws Exception {
+    checkDoubleArrayInTable("double_array");
+  }
+
+  @Test
+  public void doubleArrayParquet() throws Exception {
+    checkNativeScanUsed("double_array_p");
+    checkDoubleArrayInTable("double_array_p");
+  }
+
+  private void checkDoubleArrayInTable(String table) throws Exception {
     // Nesting 0: reading ARRAY<DOUBLE>
     testBuilder()
-        .sqlQuery("SELECT arr_n_0 FROM hive.`double_array`")
+        .sqlQuery("SELECT arr_n_0 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_0")
-        .baselineValuesForSingleColumn(asList(-13.241563769628, 0.3436367772981237, 9.73366))
-        .baselineValuesForSingleColumn(emptyList())
-        .baselineValuesForSingleColumn(asList(15.581409176959358))
+        .baselineValues(asList(-13.241563769628, 0.3436367772981237, 9.73366))
+        .baselineValues(emptyList())
+        .baselineValues(asList(15.581409176959358))
         .go();
 
     // Nesting 1: reading ARRAY<ARRAY<DOUBLE>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_1 FROM hive.`double_array`")
+        .sqlQuery("SELECT arr_n_1 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_1")
-        .baselineValuesForSingleColumn(asList(asList(-24.049666910012498, 14.975034200, 1.19975056092457), asList(-2.293376758961259, 80.783)))
-        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
-        .baselineValuesForSingleColumn(asList(asList(0.47745359256854, -0.47745359256854)))
+        .baselineValues(asList(asList(-24.049666910012498, 14.975034200, 1.19975056092457), asList(-2.293376758961259, 80.783)))
+        .baselineValues(asList(emptyList(), emptyList()))
+        .baselineValues(asList(asList(0.47745359256854, -0.47745359256854)))
         .go();
 
     // Nesting 2: reading ARRAY<ARRAY<ARRAY<DOUBLE>>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_2 FROM hive.`double_array` order by rid")
+        .sqlQuery("SELECT arr_n_2 FROM hive.`%s` order by rid", table)
         .ordered()
         .baselineColumns("arr_n_2")
-        .baselineValuesForSingleColumn(
+        .baselineValues(
             asList( // row
                 asList( // [0]
                     asList(-9.269519394436928),//[0][0]
@@ -1343,7 +1002,7 @@ public class TestHiveArrays extends ClusterTest {
                 )
             )
         )
-        .baselineValuesForSingleColumn(
+        .baselineValues(
             asList( // row
                 asList( // [0]
                     asList(-7966.1700155142025, 2519.664646202656),//[0][0]
@@ -1370,7 +1029,7 @@ public class TestHiveArrays extends ClusterTest {
                 )
             )
         )
-        .baselineValuesForSingleColumn(
+        .baselineValues(
             asList( // row
                 asList( // [0]
                     asList(0.054727088545119096, 0.3289046600776335, -183.0613955159468)//[0][0]
@@ -1389,125 +1048,139 @@ public class TestHiveArrays extends ClusterTest {
 
   @Test
   public void dateArray() throws Exception {
+    checkDateArrayInTable("date_array");
+  }
 
+  @Test
+  public void dateArrayParquet() throws Exception {
+    checkNativeScanUsed("date_array_p");
+    checkDateArrayInTable("date_array_p");
+  }
+
+  private void checkDateArrayInTable(String table) throws Exception {
     // Nesting 0: reading ARRAY<DATE>
     testBuilder()
-        .sqlQuery("SELECT arr_n_0 FROM hive.`date_array`")
+        .sqlQuery("SELECT arr_n_0 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_0")
-        .baselineValuesForSingleColumn(asList(
+        .baselineValues(asList(
             parseLocalDate("2018-10-21"),
             parseLocalDate("2017-07-11"),
             parseLocalDate("2018-09-23")))
-        .baselineValuesForSingleColumn(emptyList())
-        .baselineValuesForSingleColumn(asList(parseLocalDate("2018-07-14")))
+        .baselineValues(emptyList())
+        .baselineValues(asList(parseLocalDate("2018-07-14")))
         .go();
 
     // Nesting 1: reading ARRAY<ARRAY<DATE>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_1 FROM hive.`date_array`")
+        .sqlQuery("SELECT arr_n_1 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_1")
-        .baselineValuesForSingleColumn(asList(
+        .baselineValues(asList(
             asList(parseLocalDate("2017-03-21"), parseLocalDate("2017-09-10"), parseLocalDate("2018-01-17")),
             asList(parseLocalDate("2017-03-24"), parseLocalDate("2018-09-22"))))
-        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
-        .baselineValuesForSingleColumn(asList(asList(parseLocalDate("2017-08-09"), parseLocalDate("2017-08-28"))))
+        .baselineValues(asList(emptyList(), emptyList()))
+        .baselineValues(asList(asList(parseLocalDate("2017-08-09"), parseLocalDate("2017-08-28"))))
         .go();
 
     // Nesting 2: reading ARRAY<ARRAY<ARRAY<DATE>>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_2 FROM hive.`date_array` order by rid")
+        .sqlQuery("SELECT arr_n_2 FROM hive.`%s` order by rid", table)
         .ordered()
         .baselineColumns("arr_n_2")
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(parseLocalDate("1952-08-24")),//[0][0]
-                    asList(parseLocalDate("1968-10-05"), parseLocalDate("1951-07-27")),//[0][1]
-                    asList(parseLocalDate("1943-11-18"), parseLocalDate("1991-04-27"))//[0][2]
-                ),
-                asList( // [1]
-                    asList(parseLocalDate("1981-12-27"), parseLocalDate("1984-02-03")),//[1][0]
-                    asList(parseLocalDate("1953-04-15"), parseLocalDate("2002-08-15"), parseLocalDate("1926-12-10")),//[1][1]
-                    asList(parseLocalDate("2009-08-09"), parseLocalDate("1919-08-30"), parseLocalDate("1906-04-10")),//[1][2]
-                    asList(parseLocalDate("1995-10-28"), parseLocalDate("1989-09-07")),//[1][3]
-                    asList(parseLocalDate("2002-01-03"), parseLocalDate("1929-03-17"), parseLocalDate("1939-10-23"))//[1][4]
-                )
+        .baselineValues(asList( // row
+            asList( // [0]
+                asList(parseLocalDate("1952-08-24")),//[0][0]
+                asList(parseLocalDate("1968-10-05"), parseLocalDate("1951-07-27")),//[0][1]
+                asList(parseLocalDate("1943-11-18"), parseLocalDate("1991-04-27"))//[0][2]
+            ),
+            asList( // [1]
+                asList(parseLocalDate("1981-12-27"), parseLocalDate("1984-02-03")),//[1][0]
+                asList(parseLocalDate("1953-04-15"), parseLocalDate("2002-08-15"), parseLocalDate("1926-12-10")),//[1][1]
+                asList(parseLocalDate("2009-08-09"), parseLocalDate("1919-08-30"), parseLocalDate("1906-04-10")),//[1][2]
+                asList(parseLocalDate("1995-10-28"), parseLocalDate("1989-09-07")),//[1][3]
+                asList(parseLocalDate("2002-01-03"), parseLocalDate("1929-03-17"), parseLocalDate("1939-10-23"))//[1][4]
             )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(parseLocalDate("1936-05-05"), parseLocalDate("1941-04-12"), parseLocalDate("1914-04-15"))//[0][0]
-                ),
-                asList( // [1]
-                    asList(parseLocalDate("1944-05-09"), parseLocalDate("2002-02-11"))//[1][0]
-                )
+        ))
+        .baselineValues(asList( // row
+            asList( // [0]
+                asList(parseLocalDate("1936-05-05"), parseLocalDate("1941-04-12"), parseLocalDate("1914-04-15"))//[0][0]
+            ),
+            asList( // [1]
+                asList(parseLocalDate("1944-05-09"), parseLocalDate("2002-02-11"))//[1][0]
             )
-        )
-        .baselineValuesForSingleColumn(
-            asList( // row
-                asList( // [0]
-                    asList(parseLocalDate("1965-04-18"), parseLocalDate("2012-11-07"), parseLocalDate("1961-03-15")),//[0][0]
-                    asList(parseLocalDate("1922-05-22"), parseLocalDate("1978-03-25")),//[0][1]
-                    asList(parseLocalDate("1935-05-29"))//[0][2]
-                ),
-                asList( // [1]
-                    asList(parseLocalDate("1904-07-08"), parseLocalDate("1968-05-23"), parseLocalDate("1946-03-31")),//[1][0]
-                    asList(parseLocalDate("2014-01-28")),//[1][1]
-                    asList(parseLocalDate("1938-09-20"), parseLocalDate("1920-07-09"), parseLocalDate("1990-12-31")),//[1][2]
-                    asList(parseLocalDate("1984-07-20"), parseLocalDate("1988-11-25")),//[1][3]
-                    asList(parseLocalDate("1941-12-21"), parseLocalDate("1939-01-16"), parseLocalDate("2012-09-19"))//[1][4]
-                ),
-                asList( // [2]
-                    asList(parseLocalDate("2020-12-28")),//[2][0]
-                    asList(parseLocalDate("1930-11-13")),//[2][1]
-                    asList(parseLocalDate("2014-05-02"), parseLocalDate("1935-02-16"), parseLocalDate("1919-01-17")),//[2][2]
-                    asList(parseLocalDate("1972-04-20"), parseLocalDate("1951-05-30"), parseLocalDate("1963-01-11"))//[2][3]
-                ),
-                asList( // [3]
-                    asList(parseLocalDate("1993-03-20"), parseLocalDate("1978-12-31")),//[3][0]
-                    asList(parseLocalDate("1965-12-15"), parseLocalDate("1970-09-02"), parseLocalDate("2010-05-25"))//[3][1]
-                )
+        ))
+        .baselineValues(asList( // row
+            asList( // [0]
+                asList(parseLocalDate("1965-04-18"), parseLocalDate("2012-11-07"), parseLocalDate("1961-03-15")),//[0][0]
+                asList(parseLocalDate("1922-05-22"), parseLocalDate("1978-03-25")),//[0][1]
+                asList(parseLocalDate("1935-05-29"))//[0][2]
+            ),
+            asList( // [1]
+                asList(parseLocalDate("1904-07-08"), parseLocalDate("1968-05-23"), parseLocalDate("1946-03-31")),//[1][0]
+                asList(parseLocalDate("2014-01-28")),//[1][1]
+                asList(parseLocalDate("1938-09-20"), parseLocalDate("1920-07-09"), parseLocalDate("1990-12-31")),//[1][2]
+                asList(parseLocalDate("1984-07-20"), parseLocalDate("1988-11-25")),//[1][3]
+                asList(parseLocalDate("1941-12-21"), parseLocalDate("1939-01-16"), parseLocalDate("2012-09-19"))//[1][4]
+            ),
+            asList( // [2]
+                asList(parseLocalDate("2020-12-28")),//[2][0]
+                asList(parseLocalDate("1930-11-13")),//[2][1]
+                asList(parseLocalDate("2014-05-02"), parseLocalDate("1935-02-16"), parseLocalDate("1919-01-17")),//[2][2]
+                asList(parseLocalDate("1972-04-20"), parseLocalDate("1951-05-30"), parseLocalDate("1963-01-11"))//[2][3]
+            ),
+            asList( // [3]
+                asList(parseLocalDate("1993-03-20"), parseLocalDate("1978-12-31")),//[3][0]
+                asList(parseLocalDate("1965-12-15"), parseLocalDate("1970-09-02"), parseLocalDate("2010-05-25"))//[3][1]
             )
-        )
+        ))
         .go();
   }
 
   @Test
   public void timestampArray() throws Exception {
+    checkTimestampArrayInTable("timestamp_array");
+  }
+
+  @Test
+  public void timestampArrayParquet() throws Exception {
+    checkNativeScanUsed("timestamp_array_p");
+    checkTimestampArrayInTable("timestamp_array_p");
+  }
+
+  private void checkTimestampArrayInTable(String table) throws Exception {
     // Nesting 0: reading ARRAY<TIMESTAMP>
     testBuilder()
-        .sqlQuery("SELECT arr_n_0 FROM hive.`timestamp_array`")
+        .sqlQuery("SELECT arr_n_0 FROM hive.`%s`", table)
+        .optionSettingQueriesForTestQuery("alter session set `" + ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP + "` = true")
         .unOrdered()
         .baselineColumns("arr_n_0")
-        .baselineValuesForSingleColumn(asList(
+        .baselineValues(asList(
             parseBest("2018-10-21 04:51:36"),
             parseBest("2017-07-11 09:26:48"),
             parseBest("2018-09-23 03:02:33")))
-        .baselineValuesForSingleColumn(emptyList())
-        .baselineValuesForSingleColumn(asList(parseBest("2018-07-14 05:20:34")))
+        .baselineValues(emptyList())
+        .baselineValues(asList(parseBest("2018-07-14 05:20:34")))
         .go();
 
     // Nesting 1: reading ARRAY<ARRAY<TIMESTAMP>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_1 FROM hive.`timestamp_array`")
+        .sqlQuery("SELECT arr_n_1 FROM hive.`%s`", table)
         .unOrdered()
         .baselineColumns("arr_n_1")
-        .baselineValuesForSingleColumn(asList(
+        .baselineValues(asList(
             asList(parseBest("2017-03-21 12:52:33"), parseBest("2017-09-10 01:29:24"), parseBest("2018-01-17 04:45:23")),
             asList(parseBest("2017-03-24 01:03:23"), parseBest("2018-09-22 05:00:26"))))
-        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
-        .baselineValuesForSingleColumn(asList(asList(parseBest("2017-08-09 08:26:08"), parseBest("2017-08-28 09:47:23"))))
+        .baselineValues(asList(emptyList(), emptyList()))
+        .baselineValues(asList(asList(parseBest("2017-08-09 08:26:08"), parseBest("2017-08-28 09:47:23"))))
         .go();
 
     // Nesting 2: reading ARRAY<ARRAY<ARRAY<TIMESTAMP>>>
     testBuilder()
-        .sqlQuery("SELECT arr_n_2 FROM hive.`timestamp_array` order by rid")
+        .sqlQuery("SELECT arr_n_2 FROM hive.`%s` order by rid", table)
         .ordered()
         .baselineColumns("arr_n_2")
-        .baselineValuesForSingleColumn(
+        .baselineValues(
             asList( // row
                 asList( // [0]
                     asList(parseBest("1929-01-08 19:31:47")),//[0][0]
@@ -1529,7 +1202,7 @@ public class TestHiveArrays extends ClusterTest {
                 )
             )
         )
-        .baselineValuesForSingleColumn(
+        .baselineValues(
             asList( // row
                 asList( // [0]
                     asList(parseBest("1904-12-10 00:39:14")),//[0][0]
@@ -1546,7 +1219,7 @@ public class TestHiveArrays extends ClusterTest {
                 )
             )
         )
-        .baselineValuesForSingleColumn(
+        .baselineValues(
             asList( // row
                 asList( // [0]
                     asList(parseBest("1999-12-07 01:16:45")),//[0][0]
@@ -1566,8 +1239,8 @@ public class TestHiveArrays extends ClusterTest {
         .sqlQuery("SELECT arr_n_0 FROM hive.`binary_array`")
         .unOrdered()
         .baselineColumns("arr_n_0")
-        .baselineValuesForSingleColumn(asList(new StringBytes("First"), new StringBytes("Second"), new StringBytes("Third")))
-        .baselineValuesForSingleColumn(asList(new StringBytes("First")))
+        .baselineValues(asList(new StringBytes("First"), new StringBytes("Second"), new StringBytes("Third")))
+        .baselineValues(asList(new StringBytes("First")))
         .go();
   }
 
@@ -1587,14 +1260,14 @@ public class TestHiveArrays extends ClusterTest {
             asList(-1, 0, 1),
             asList(asList(-1, 0, 1), asList(-2, 1)),
 
-            asList(new Text("First Value Of Array"), new Text("komlnp"), new Text("The Last Value")),
-            asList(asList(new Text("Array 0, Value 0"), new Text("Array 0, Value 1")), asList(new Text("Array 1"))),
+            asTextList("First Value Of Array", "komlnp", "The Last Value"),
+            asList(asTextList("Array 0, Value 0", "Array 0, Value 1"), asTextList("Array 1")),
 
-            asList(new Text("Five"), new Text("One"), new Text("T")),
-            asList(asList(new Text("Five"), new Text("One"), new Text("$42")), asList(new Text("T"), new Text("K"), new Text("O"))),
+            asTextList("Five", "One", "T"),
+            asList(asTextList("Five", "One", "$42"), asTextList("T", "K", "O")),
 
-            asList(new Text("aa"), new Text("cc"), new Text("ot")),
-            asList(asList(new Text("aa")), asList(new Text("cc"), new Text("ot"))),
+            asTextList("aa", "cc", "ot"),
+            asList(asTextList("aa"), asTextList("cc", "ot")),
 
             asList(-128, 0, 127),
             asList(asList(-128, -127), asList(0, 1), asList(127, 126)),
@@ -1704,14 +1377,14 @@ public class TestHiveArrays extends ClusterTest {
             asList(-1, 0, 1),
             asList(asList(-1, 0, 1), asList(-2, 1)),
 
-            asList(new Text("First Value Of Array"), new Text("komlnp"), new Text("The Last Value")),
-            asList(asList(new Text("Array 0, Value 0"), new Text("Array 0, Value 1")), asList(new Text("Array 1"))),
+            asTextList("First Value Of Array", "komlnp", "The Last Value"),
+            asList(asTextList("Array 0, Value 0", "Array 0, Value 1"), asTextList("Array 1")),
 
-            asList(new Text("Five"), new Text("One"), new Text("T")),
-            asList(asList(new Text("Five"), new Text("One"), new Text("$42")), asList(new Text("T"), new Text("K"), new Text("O"))),
+            asTextList("Five", "One", "T"),
+            asList(asTextList("Five", "One", "$42"), asTextList("T", "K", "O")),
 
-            asList(new Text("aa"), new Text("cc"), new Text("ot")),
-            asList(asList(new Text("aa")), asList(new Text("cc"), new Text("ot"))),
+            asTextList("aa", "cc", "ot"),
+            asList(asTextList("aa"), asTextList("cc", "ot")),
 
             asList(-128, 0, 127),
             asList(asList(-128, -127), asList(0, 1), asList(127, 126)),
@@ -1775,4 +1448,10 @@ public class TestHiveArrays extends ClusterTest {
 
   }
 
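+  /** Builds a list of Hadoop Text values from the given strings, for use in test baselines. */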
+  private static List<Text> asTextList(String... strings) {
+    return Stream.of(strings)
+        .map(Text::new)
+        .collect(Collectors.toList());
+  }
+
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetGroupConverter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetGroupConverter.java
index 296d34c..898fcf1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetGroupConverter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetGroupConverter.java
@@ -17,15 +17,15 @@
  */
 package org.apache.drill.exec.store.parquet2;
 
-import static org.apache.drill.exec.store.parquet.ParquetReaderUtility.NanoTimeUtils.getDateTimeValueFromBinary;
-
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
+import java.util.function.BiFunction;
+import java.util.function.Function;
 
-import org.apache.drill.shaded.guava.com.google.common.primitives.Ints;
-import org.apache.drill.shaded.guava.com.google.common.primitives.Longs;
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.common.expression.PathSegment;
 import org.apache.drill.common.expression.SchemaPath;
@@ -46,7 +46,9 @@ import org.apache.drill.exec.physical.impl.OutputMutator;
 import org.apache.drill.exec.server.options.OptionManager;
 import org.apache.drill.exec.store.parquet.ParquetReaderUtility;
 import org.apache.drill.exec.store.parquet.columnreaders.ParquetColumnMetadata;
-import org.apache.drill.exec.vector.complex.impl.ComplexWriterImpl;
+import org.apache.drill.exec.vector.complex.impl.RepeatedMapWriter;
+import org.apache.drill.exec.vector.complex.impl.SingleMapWriter;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter;
 import org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter;
 import org.apache.drill.exec.vector.complex.writer.BigIntWriter;
 import org.apache.drill.exec.vector.complex.writer.BitWriter;
@@ -60,114 +62,156 @@ import org.apache.drill.exec.vector.complex.writer.TimeWriter;
 import org.apache.drill.exec.vector.complex.writer.VarBinaryWriter;
 import org.apache.drill.exec.vector.complex.writer.VarCharWriter;
 import org.apache.drill.exec.vector.complex.writer.VarDecimalWriter;
+import org.apache.drill.shaded.guava.com.google.common.primitives.Ints;
+import org.apache.drill.shaded.guava.com.google.common.primitives.Longs;
 import org.apache.parquet.io.api.Binary;
 import org.apache.parquet.io.api.Converter;
 import org.apache.parquet.io.api.GroupConverter;
 import org.apache.parquet.io.api.PrimitiveConverter;
-import org.apache.parquet.schema.DecimalMetadata;
 import org.apache.parquet.schema.GroupType;
-import org.apache.parquet.schema.MessageType;
+import org.apache.parquet.schema.OriginalType;
 import org.apache.parquet.schema.PrimitiveType;
 import org.apache.parquet.schema.Type;
 import org.apache.parquet.schema.Type.Repetition;
 import org.joda.time.DateTimeConstants;
 
-import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
-
-import io.netty.buffer.DrillBuf;
+import static org.apache.drill.common.expression.SchemaPath.DYNAMIC_STAR;
+import static org.apache.drill.exec.store.parquet.ParquetReaderUtility.NanoTimeUtils.getDateTimeValueFromBinary;
 
 public class DrillParquetGroupConverter extends GroupConverter {
 
-  private List<Converter> converters;
-  private MapWriter mapWriter;
+  private final List<Converter> converters;
+  private final BaseWriter baseWriter;
   private final OutputMutator mutator;
   private final OptionManager options;
   // See DRILL-4203
   private final ParquetReaderUtility.DateCorruptionStatus containsCorruptedDates;
 
-  public DrillParquetGroupConverter(OutputMutator mutator, ComplexWriterImpl complexWriter, MessageType schema,
-                                    Collection<SchemaPath> columns, OptionManager options,
-                                    ParquetReaderUtility.DateCorruptionStatus containsCorruptedDates) {
-    this(mutator, complexWriter.rootAsMap(), schema, columns, options, containsCorruptedDates);
-  }
+  /**
+   * Debugging information in the form "parent">fieldName[WriterClassName-hashCode()],
+   * where "parent" is the parent converter's converterName.
+   */
+  private final String converterName;
 
-  // This function assumes that the fields in the schema parameter are in the same order as the fields in the columns parameter. The
-  // columns parameter may have fields that are not present in the schema, though.
-  public DrillParquetGroupConverter(OutputMutator mutator, MapWriter mapWriter, GroupType schema,
+  /**
+   * The constructor is responsible for creating the converters tree and may invoke itself to
+   * create child converters when a nested field is itself a group type. It is assumed that the
+   * ordering of fields in the schema parameter matches the ordering of paths in the columns list,
+   * though columns may contain fields which aren't present in the schema.
+   *
+   * @param mutator                output mutator, used to share managed buffer with primitive converters
+   * @param baseWriter             map or list writer associated with the group converter
+   * @param schema                 group type of the converter
+   * @param columns                columns to project
+   * @param options                option manager used to check enabled options when necessary
+   * @param containsCorruptedDates allows selecting a strategy for date handling
+   * @param skipRepeated           true only if the parent field in the schema was detected as a list and the current schema is a repeated group type
+   * @param parentName             name of the group converter which invoked this constructor
+   */
+  public DrillParquetGroupConverter(OutputMutator mutator, BaseWriter baseWriter, GroupType schema,
                                     Collection<SchemaPath> columns, OptionManager options,
-                                    ParquetReaderUtility.DateCorruptionStatus containsCorruptedDates) {
-    this.mapWriter = mapWriter;
+                                    ParquetReaderUtility.DateCorruptionStatus containsCorruptedDates,
+                                    boolean skipRepeated, String parentName) {
+    this.converterName = String.format("%s>%s[%s-%d]", parentName, schema.getName(), baseWriter.getClass().getSimpleName(), baseWriter.hashCode());
+    this.baseWriter = baseWriter;
     this.mutator = mutator;
     this.containsCorruptedDates = containsCorruptedDates;
-    converters = Lists.newArrayList();
+    this.converters = new ArrayList<>();
     this.options = options;
 
-    Iterator<SchemaPath> colIterator=columns.iterator();
+    Iterator<SchemaPath> colIterator = columns.iterator();
 
-    for (Type type : schema.getFields()) {
-      Repetition rep = type.getRepetition();
-      boolean isPrimitive = type.isPrimitive();
+    for (final Type type : schema.getFields()) {
 
       // Match the name of the field in the schema definition to the name of the field in the query.
-      String name = null;
-      SchemaPath col;
-      PathSegment colPath;
+      String name = type.getName();
       PathSegment colNextChild = null;
       while (colIterator.hasNext()) {
-        col = colIterator.next();
-        colPath = col.getRootSegment();
-        colNextChild = colPath.getChild();
-
-        if (colPath != null && colPath.isNamed() && (!SchemaPath.DYNAMIC_STAR.equals(colPath.getNameSegment().getPath()))) {
-          name = colPath.getNameSegment().getPath();
-          // We may have a field that does not exist in the schema
-          if (!name.equalsIgnoreCase(type.getName())) {
-            continue;
-          }
+        PathSegment colPath = colIterator.next().getRootSegment();
+        String colPathName;
+        if (colPath.isNamed() &&
+            !DYNAMIC_STAR.equals(colPathName = colPath.getNameSegment().getPath()) &&
+            colPathName.equalsIgnoreCase(name)) {
+          name = colPathName;
+          colNextChild = colPath.getChild();
+          break;
         }
-        break;
-      }
-      if (name == null) {
-        name = type.getName();
       }
 
-      if (!isPrimitive) {
-        Collection<SchemaPath> c = new ArrayList<>();
+      Converter converter = createFieldConverter(skipRepeated, type, name, colNextChild);
+      converters.add(converter);
+    }
+  }
 
-        while(colNextChild!=null) {
-          if(colNextChild.isNamed()) {
-            break;
-          }
-          colNextChild=colNextChild.getChild();
-        }
+  private Converter createFieldConverter(boolean skipRepeated, Type fieldType, String name, PathSegment colNextChild) {
+    Converter converter;
+    if (fieldType.isPrimitive()) {
+      converter = getConverterForType(name, fieldType.asPrimitiveType());
+    } else {
+      while (colNextChild != null && !colNextChild.isNamed()) {
+        colNextChild = colNextChild.getChild();
+      }
 
-        if(colNextChild!=null) {
-          SchemaPath s = new SchemaPath(colNextChild.getNameSegment());
-          c.add(s);
-        }
-        if (rep != Repetition.REPEATED) {
-          DrillParquetGroupConverter converter = new DrillParquetGroupConverter(
-              mutator, mapWriter.map(name), type.asGroupType(), c, options, containsCorruptedDates);
-          converters.add(converter);
+      Collection<SchemaPath> columns = colNextChild == null
+          ? Collections.emptyList()
+          : Collections.singletonList(new SchemaPath(colNextChild.getNameSegment()));
+
+      BaseWriter writer;
+      GroupType fieldGroupType = fieldType.asGroupType();
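+      // Field annotated as LIST: create a list writer here and tell the child converter
+      // to skip the intermediate repeated group (skipRepeated = true).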
+      if (isLogicalListType(fieldGroupType)) {
+        writer = getWriter(name, (m, s) -> m.list(s), l -> l.list());
+        converter = new DrillParquetGroupConverter(mutator, writer, fieldGroupType, columns, options,
+            containsCorruptedDates, true, converterName);
+      } else if (fieldType.isRepetition(Repetition.REPEATED)) {
+        if (skipRepeated) {
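+          // The repeated group directly under a LIST annotation carries no data of its own,
+          // so a pass-through converter reuses the current writer instead of adding a level.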
+          converter = new DrillIntermediateParquetGroupConverter(mutator, baseWriter, fieldGroupType, columns, options,
+              containsCorruptedDates, false, converterName);
         } else {
-          DrillParquetGroupConverter converter = new DrillParquetGroupConverter(
-              mutator, mapWriter.list(name).map(), type.asGroupType(), c, options, containsCorruptedDates);
-          converters.add(converter);
+          writer = getWriter(name, (m, s) -> m.list(s).map(), l -> l.list().map());
+          converter = new DrillParquetGroupConverter(mutator, writer, fieldGroupType, columns, options,
+              containsCorruptedDates, false, converterName);
         }
       } else {
-        PrimitiveConverter converter = getConverterForType(name, type.asPrimitiveType());
-        converters.add(converter);
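+        // Ordinary (non-repeated) group field: maps to a Drill map writer.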
+        writer = getWriter(name, (m, s) -> m.map(s), l -> l.map());
+        converter = new DrillParquetGroupConverter(mutator, writer, fieldGroupType, columns, options,
+            containsCorruptedDates, false, converterName);
       }
+
     }
+    return converter;
   }
 
-  private PrimitiveConverter getConverterForType(String name, PrimitiveType type) {
+  /**
+   * Checks whether a group field approximately matches the pattern for logical LIST types:
+   * <list-repetition> group <name> (LIST) {
+   *   repeated group list {
+   *     <element-repetition> <element-type> element;
+   *   }
+   * }
+   * (See https://github.com/apache/parquet-format/blob/master/LogicalTypes.md#lists for more details.)
+   *
+   * Note that the standard field names 'list' and 'element' are intentionally not checked,
+   * because Hive lists use the names 'bag' and 'array_element' instead.
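+   *
+   * For example, Hive typically writes ARRAY&lt;INT&gt; along these lines (an illustrative,
+   * assumed layout; only the 'bag' and 'array_element' names come from the note above):
+   * optional group arr_n_0 (LIST) {
+   *   repeated group bag {
+   *     optional int32 array_element;
+   *   }
+   * }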
+   *
+   * @param groupType type which may have the LIST original type
+   * @return whether the type is LIST and its nested field is a repeated group
+   */
+  private boolean isLogicalListType(GroupType groupType) {
+    if (groupType.getOriginalType() == OriginalType.LIST && groupType.getFieldCount() == 1) {
+      Type nestedField = groupType.getFields().get(0);
+      return nestedField.isRepetition(Repetition.REPEATED)
+          && !nestedField.isPrimitive()
+          && nestedField.getOriginalType() == null
+          && nestedField.asGroupType().getFieldCount() == 1;
+    }
+    return false;
+  }
 
+  private PrimitiveConverter getConverterForType(String name, PrimitiveType type) {
     switch(type.getPrimitiveTypeName()) {
       case INT32: {
         if (type.getOriginalType() == null) {
-          IntWriter writer = type.getRepetition() == Repetition.REPEATED ? mapWriter.list(name).integer() : mapWriter.integer(name);
-          return new DrillIntConverter(writer);
+          return getIntConverter(name, type);
         }
         switch(type.getOriginalType()) {
           case UINT_8 :
@@ -176,19 +220,17 @@ public class DrillParquetGroupConverter extends GroupConverter {
           case INT_8  :
           case INT_16 :
           case INT_32 : {
-            IntWriter writer = type.getRepetition() == Repetition.REPEATED ? mapWriter.list(name).integer() : mapWriter.integer(name);
-            return new DrillIntConverter(writer);
+            return getIntConverter(name, type);
           }
           case DECIMAL: {
             ParquetReaderUtility.checkDecimalTypeEnabled(options);
-            VarDecimalWriter writer = type.getRepetition() == Repetition.REPEATED
-                ? mapWriter.list(name).varDecimal(type.getDecimalMetadata().getScale(), type.getDecimalMetadata().getPrecision())
-                : mapWriter.varDecimal(name, type.getDecimalMetadata().getScale(), type.getDecimalMetadata().getPrecision());
-            return new DrillVarDecimalConverter(writer, type.getDecimalMetadata().getPrecision(),
-                type.getDecimalMetadata().getScale(), mutator.getManagedBuffer());
+            return getVarDecimalConverter(name, type);
           }
           case DATE: {
-            DateWriter writer = type.getRepetition() == Repetition.REPEATED ? mapWriter.list(name).date() : mapWriter.date(name);
+            DateWriter writer = type.isRepetition(Repetition.REPEATED)
+                ? getWriter(name, (m, f) -> m.list(f).date(), l -> l.list().date())
+                : getWriter(name, (m, f) -> m.date(f), l -> l.date());
+
             switch(containsCorruptedDates) {
               case META_SHOWS_CORRUPTION:
                 return new DrillCorruptedDateConverter(writer);
@@ -204,7 +246,9 @@ public class DrillParquetGroupConverter extends GroupConverter {
             }
           }
           case TIME_MILLIS: {
-            TimeWriter writer = type.getRepetition() == Repetition.REPEATED ? mapWriter.list(name).time() : mapWriter.time(name);
+            TimeWriter writer = type.isRepetition(Repetition.REPEATED)
+                ? getWriter(name, (m, f) -> m.list(f).time(), l -> l.list().time())
+                : getWriter(name, (m, f) -> m.time(f), l -> l.time());
             return new DrillTimeConverter(writer);
           }
           default: {
@@ -214,27 +258,21 @@ public class DrillParquetGroupConverter extends GroupConverter {
       }
       case INT64: {
         if (type.getOriginalType() == null) {
-          BigIntWriter writer = type.getRepetition() == Repetition.REPEATED ? mapWriter.list(name).bigInt() : mapWriter.bigInt(name);
-          return new DrillBigIntConverter(writer);
+          return getBigIntConverter(name, type);
         }
         switch(type.getOriginalType()) {
           // DRILL-6670: handle TIMESTAMP_MICROS as INT64 with no logical type
           case UINT_64:
           case INT_64 :
           case TIMESTAMP_MICROS: {
-            BigIntWriter writer = type.getRepetition() == Repetition.REPEATED ? mapWriter.list(name).bigInt() : mapWriter.bigInt(name);
-            return new DrillBigIntConverter(writer);
+            return getBigIntConverter(name, type);
           }
           case DECIMAL: {
             ParquetReaderUtility.checkDecimalTypeEnabled(options);
-            VarDecimalWriter writer = type.getRepetition() == Repetition.REPEATED
-                ? mapWriter.list(name).varDecimal(type.getDecimalMetadata().getScale(), type.getDecimalMetadata().getPrecision())
-                : mapWriter.varDecimal(name, type.getDecimalMetadata().getScale(), type.getDecimalMetadata().getPrecision());
-            return new DrillVarDecimalConverter(writer, type.getDecimalMetadata().getPrecision(),
-                type.getDecimalMetadata().getScale(), mutator.getManagedBuffer());
+            return getVarDecimalConverter(name, type);
           }
           case TIMESTAMP_MILLIS: {
-            TimeStampWriter writer = type.getRepetition() == Repetition.REPEATED ? mapWriter.list(name).timeStamp() : mapWriter.timeStamp(name);
+            TimeStampWriter writer = getTimeStampWriter(name, type);
             return new DrillTimeStampConverter(writer);
           }
           default: {
@@ -246,50 +284,53 @@ public class DrillParquetGroupConverter extends GroupConverter {
         // TODO: replace null with TIMESTAMP_NANOS once parquet support such type annotation.
         if (type.getOriginalType() == null) {
           if (options.getOption(ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP).bool_val) {
-            TimeStampWriter writer = type.getRepetition() == Repetition.REPEATED ? mapWriter.list(name).timeStamp() : mapWriter.timeStamp(name);
+            TimeStampWriter writer = getTimeStampWriter(name, type);
             return new DrillFixedBinaryToTimeStampConverter(writer);
           } else {
-            VarBinaryWriter writer = type.getRepetition() == Repetition.REPEATED ? mapWriter.list(name).varBinary() : mapWriter.varBinary(name);
+            VarBinaryWriter writer = type.isRepetition(Repetition.REPEATED)
+                ? getWriter(name, (m, f) -> m.list(f).varBinary(), l -> l.list().varBinary())
+                : getWriter(name, (m, f) -> m.varBinary(f), l -> l.varBinary());
             return new DrillFixedBinaryToVarbinaryConverter(writer, ParquetColumnMetadata.getTypeLengthInBits(type.getPrimitiveTypeName()) / 8, mutator.getManagedBuffer());
           }
         }
 
       }
       case FLOAT: {
-        Float4Writer writer = type.getRepetition() == Repetition.REPEATED ? mapWriter.list(name).float4() : mapWriter.float4(name);
+        Float4Writer writer = type.isRepetition(Repetition.REPEATED)
+            ? getWriter(name, (m, f) -> m.list(f).float4(), l -> l.list().float4())
+            : getWriter(name, (m, f) -> m.float4(f), l -> l.float4());
         return new DrillFloat4Converter(writer);
       }
       case DOUBLE: {
-        Float8Writer writer = type.getRepetition() == Repetition.REPEATED ? mapWriter.list(name).float8() : mapWriter.float8(name);
+        Float8Writer writer = type.isRepetition(Repetition.REPEATED)
+            ? getWriter(name, (m, f) -> m.list(f).float8(), l -> l.list().float8())
+            : getWriter(name, (m, f) -> m.float8(f), l -> l.float8());
         return new DrillFloat8Converter(writer);
       }
       case BOOLEAN: {
-        BitWriter writer = type.getRepetition() == Repetition.REPEATED ? mapWriter.list(name).bit() : mapWriter.bit(name);
+        BitWriter writer = type.isRepetition(Repetition.REPEATED)
+            ? getWriter(name, (m, f) -> m.list(f).bit(), l -> l.list().bit())
+            : getWriter(name, (m, f) -> m.bit(f), l -> l.bit());
         return new DrillBoolConverter(writer);
       }
       case BINARY: {
         if (type.getOriginalType() == null) {
-          VarBinaryWriter writer = type.getRepetition() == Repetition.REPEATED ? mapWriter.list(name).varBinary() : mapWriter.varBinary(name);
+          VarBinaryWriter writer = type.isRepetition(Repetition.REPEATED)
+              ? getWriter(name, (m, f) -> m.list(f).varBinary(), l -> l.list().varBinary())
+              : getWriter(name, (m, f) -> m.varBinary(f), l -> l.varBinary());
           return new DrillVarBinaryConverter(writer, mutator.getManagedBuffer());
         }
         switch(type.getOriginalType()) {
           case UTF8: {
-            VarCharWriter writer = type.getRepetition() == Repetition.REPEATED ? mapWriter.list(name).varChar() : mapWriter.varChar(name);
-            return new DrillVarCharConverter(writer, mutator.getManagedBuffer());
+            return getVarCharConverter(name, type);
           }
           case ENUM: {
-            VarCharWriter writer = type.getRepetition() == Repetition.REPEATED ? mapWriter.list(name).varChar() : mapWriter.varChar(name);
-            return new DrillVarCharConverter(writer, mutator.getManagedBuffer());
+            return getVarCharConverter(name, type);
           }
           // See DRILL-4184 and DRILL-4834. Support for this is added using new VarDecimal type.
           case DECIMAL: {
             ParquetReaderUtility.checkDecimalTypeEnabled(options);
-            DecimalMetadata metadata = type.getDecimalMetadata();
-            VarDecimalWriter writer =
-                type.getRepetition() == Repetition.REPEATED
-                    ? mapWriter.list(name).varDecimal(metadata.getScale(), metadata.getPrecision())
-                    : mapWriter.varDecimal(name, metadata.getScale(), metadata.getPrecision());
-            return new DrillVarDecimalConverter(writer, metadata.getPrecision(), metadata.getScale(), mutator.getManagedBuffer());
+            return getVarDecimalConverter(name, type);
           }
           default: {
             throw new UnsupportedOperationException("Unsupported type " + type.getOriginalType());
@@ -300,23 +341,18 @@ public class DrillParquetGroupConverter extends GroupConverter {
         switch (type.getOriginalType()) {
           case DECIMAL: {
             ParquetReaderUtility.checkDecimalTypeEnabled(options);
-            DecimalMetadata metadata = type.getDecimalMetadata();
-            VarDecimalWriter writer = type.getRepetition() == Repetition.REPEATED
-                ? mapWriter.list(name).varDecimal(metadata.getScale(), metadata.getPrecision())
-                : mapWriter.varDecimal(name, metadata.getScale(), metadata.getPrecision());
-            return new DrillVarDecimalConverter(writer, metadata.getPrecision(), metadata.getScale(), mutator.getManagedBuffer());
+            return getVarDecimalConverter(name, type);
           }
           case INTERVAL: {
-            IntervalWriter writer = type.getRepetition() == Repetition.REPEATED
-                ? mapWriter.list(name).interval()
-                : mapWriter.interval(name);
+            IntervalWriter writer = type.isRepetition(Repetition.REPEATED)
+                ? getWriter(name, (m, f) -> m.list(f).interval(), l -> l.list().interval())
+                : getWriter(name, (m, f) -> m.interval(f), l -> l.interval());
             return new DrillFixedLengthByteArrayToInterval(writer);
-
           }
           default: {
-            VarBinaryWriter writer = type.getRepetition() == Repetition.REPEATED
-                ? mapWriter.list(name).varBinary()
-                : mapWriter.varBinary(name);
+            VarBinaryWriter writer = type.isRepetition(Repetition.REPEATED)
+                ? getWriter(name, (m, f) -> m.list(f).varBinary(), l -> l.list().varBinary())
+                : getWriter(name, (m, f) -> m.varBinary(f), l -> l.varBinary());
             return new DrillFixedBinaryToVarbinaryConverter(writer, type.getTypeLength(), mutator.getManagedBuffer());
           }
         }
@@ -325,6 +361,42 @@ public class DrillParquetGroupConverter extends GroupConverter {
     }
   }
 
+  private PrimitiveConverter getVarCharConverter(String name, PrimitiveType type) {
+    VarCharWriter writer = type.isRepetition(Repetition.REPEATED)
+        ? getWriter(name, (m, f) -> m.list(f).varChar(), l -> l.list().varChar())
+        : getWriter(name, (m, f) -> m.varChar(f), l -> l.varChar());
+    return new DrillVarCharConverter(writer, mutator.getManagedBuffer());
+  }
+
+  private TimeStampWriter getTimeStampWriter(String name, PrimitiveType type) {
+    return type.isRepetition(Repetition.REPEATED)
+        ? getWriter(name, (m, f) -> m.list(f).timeStamp(), l -> l.list().timeStamp())
+        : getWriter(name, (m, f) -> m.timeStamp(f), l -> l.timeStamp());
+  }
+
+  private PrimitiveConverter getBigIntConverter(String name, PrimitiveType type) {
+    BigIntWriter writer = type.isRepetition(Repetition.REPEATED)
+        ? getWriter(name, (m, f) -> m.list(f).bigInt(), l -> l.list().bigInt())
+        : getWriter(name, (m, f) -> m.bigInt(f), l -> l.bigInt());
+    return new DrillBigIntConverter(writer);
+  }
+
+  private PrimitiveConverter getIntConverter(String name, PrimitiveType type) {
+    IntWriter writer = type.isRepetition(Repetition.REPEATED)
+        ? getWriter(name, (m, f) -> m.list(f).integer(), l -> l.list().integer())
+        : getWriter(name, (m, f) -> m.integer(f), l -> l.integer());
+    return new DrillIntConverter(writer);
+  }
+
+  private PrimitiveConverter getVarDecimalConverter(String name, PrimitiveType type) {
+    int s = type.getDecimalMetadata().getScale();
+    int p = type.getDecimalMetadata().getPrecision();
+    VarDecimalWriter writer = type.isRepetition(Repetition.REPEATED)
+        ? getWriter(name, (m, f) -> m.list(f).varDecimal(s, p), l -> l.list().varDecimal(s, p))
+        : getWriter(name, (m, f) -> m.varDecimal(f, s, p), l -> l.varDecimal(s, p));
+    return new DrillVarDecimalConverter(writer, p, s, mutator.getManagedBuffer());
+  }
+
   @Override
   public Converter getConverter(int i) {
     return converters.get(i);
@@ -332,12 +404,35 @@ public class DrillParquetGroupConverter extends GroupConverter {
 
   @Override
   public void start() {
-    mapWriter.start();
+    if (baseWriter instanceof SingleMapWriter || baseWriter instanceof RepeatedMapWriter) {
+      ((MapWriter) baseWriter).start();
+    } else {
+      ((BaseWriter.ListWriter) baseWriter).startList();
+    }
   }
 
   @Override
   public void end() {
-    mapWriter.end();
+    if (baseWriter instanceof SingleMapWriter || baseWriter instanceof RepeatedMapWriter) {
+      ((MapWriter) baseWriter).end();
+    } else {
+      ((BaseWriter.ListWriter) baseWriter).endList();
+    }
+  }
+
+  @Override
+  public String toString() {
+    return converterName;
+  }
+
+  private <T> T getWriter(String name, BiFunction<MapWriter, String, T> fromMap, Function<BaseWriter.ListWriter, T> fromList) {
+    if (baseWriter instanceof SingleMapWriter || baseWriter instanceof RepeatedMapWriter) {
+      return fromMap.apply((MapWriter) baseWriter, name);
+    } else if (baseWriter instanceof BaseWriter.ListWriter) {
+      return fromList.apply((BaseWriter.ListWriter) baseWriter);
+    } else {
+      throw new IllegalStateException(String.format("Parent writer with type [%s] is unsupported", baseWriter.getClass()));
+    }
   }
 
   public static class DrillIntConverter extends PrimitiveConverter {
@@ -634,4 +729,24 @@ public class DrillParquetGroupConverter extends GroupConverter {
       writer.write(holder);
     }
   }
+
+  /**
+   * Converter for a field that is present in the schema but doesn't need any actions to be
+   * performed by the writer. The converter is added to the converter chain but simply does
+   * nothing; the actual writing is performed by other converters in the chain.
+   */
+  private static class DrillIntermediateParquetGroupConverter extends DrillParquetGroupConverter {
+
+    DrillIntermediateParquetGroupConverter(OutputMutator mutator, BaseWriter baseWriter, GroupType schema,
+                                           Collection<SchemaPath> columns, OptionManager options,
+                                           ParquetReaderUtility.DateCorruptionStatus containsCorruptedDates,
+                                           boolean skipRepeated, String parentName) {
+      super(mutator, baseWriter, schema, columns, options, containsCorruptedDates, skipRepeated, parentName);
+    }
+
+    @Override
+    public void start() {}
+
+    @Override
+    public void end() {}
+  }
+
 }
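
Side note on the skipRepeated flag and the no-op converter above: Hive lays an ARRAY column
out in Parquet as three levels (an outer group annotated LIST, a repeated middle group, and
the element field). That middle level is exactly the case the javadoc describes: present in
the schema, but with nothing of its own to write. A minimal sketch of the shape, built with
the parquet-java Types builder; the field names follow Hive's convention and are purely
illustrative:

    import org.apache.parquet.schema.MessageType;
    import org.apache.parquet.schema.OriginalType;
    import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
    import org.apache.parquet.schema.Types;

    public class HiveListShapeDemo {
      public static void main(String[] args) {
        // ARRAY<INT> as written by Hive: optional group (LIST) -> repeated group -> element
        MessageType schema = Types.buildMessage()
            .optionalGroup().as(OriginalType.LIST)
                .repeatedGroup()
                    .optional(PrimitiveTypeName.INT32).named("array_element")
                .named("bag")
            .named("int_arr_n_0")
            .named("hive_record");
        System.out.println(schema);
      }
    }

Only the outer LIST group and the element correspond to Drill writers; the repeated "bag"
level is where a converter that "simply does nothing" fits, keeping the converter chain
aligned with the file schema.
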
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetReader.java
index f338d2b..dd0ec0a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetReader.java
@@ -58,13 +58,13 @@ import org.apache.parquet.hadoop.metadata.ParquetMetadata;
 import org.apache.parquet.io.ColumnIOFactory;
 import org.apache.parquet.io.MessageColumnIO;
 import org.apache.parquet.io.RecordReader;
-import org.apache.parquet.schema.GroupType;
 import org.apache.parquet.schema.MessageType;
 import org.apache.parquet.schema.Type;
 
 import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
 import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
 import org.apache.drill.shaded.guava.com.google.common.collect.Sets;
+import org.apache.parquet.schema.Types;
 
 public class DrillParquetReader extends CommonParquetRecordReader {
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillParquetReader.class);
@@ -106,9 +106,9 @@ public class DrillParquetReader extends CommonParquetRecordReader {
     this.recordsPerBatch = (int) fragmentContext.getOptions().getLong(ExecConstants.PARQUET_COMPLEX_BATCH_NUM_RECORDS);
   }
 
-  public static MessageType getProjection(MessageType schema,
-                                          Collection<SchemaPath> columns,
-                                          List<SchemaPath> columnsNotFound) {
+  private static MessageType getProjection(MessageType schema,
+                                           Collection<SchemaPath> columns,
+                                           List<SchemaPath> columnsNotFound) {
     MessageType projection = null;
 
     String messageName = schema.getName();
@@ -121,17 +121,15 @@ public class DrillParquetReader extends CommonParquetRecordReader {
     // get a list of modified columns which have the array elements removed from the schema path since parquet schema doesn't include array elements
     List<SchemaPath> modifiedColumns = Lists.newLinkedList();
     for (SchemaPath path : columns) {
+
       List<String> segments = Lists.newArrayList();
-      PathSegment seg = path.getRootSegment();
-      do {
+      for (PathSegment seg = path.getRootSegment(); seg != null; seg = seg.getChild()) {
         if (seg.isNamed()) {
           segments.add(seg.getNameSegment().getPath());
         }
-      } while ((seg = seg.getChild()) != null);
-      String[] pathSegments = new String[segments.size()];
-      segments.toArray(pathSegments);
-      SchemaPath modifiedSchemaPath = SchemaPath.getCompoundPath(pathSegments);
-      modifiedColumns.add(modifiedSchemaPath);
+      }
+
+      modifiedColumns.add(SchemaPath.getCompoundPath(segments.toArray(new String[0])));
     }
 
     // convert the columns in the parquet schema to a list of SchemaPath columns so that they can be compared in case insensitive manner
@@ -268,12 +266,16 @@ public class DrillParquetReader extends CommonParquetRecordReader {
   }
 
   private static Type getType(String[] pathSegments, int depth, MessageType schema) {
-    Type type = schema.getType(Arrays.copyOfRange(pathSegments, 0, depth + 1));
-    if (depth + 1 == pathSegments.length) {
+    int nextDepth = depth + 1;
+    Type type = schema.getType(Arrays.copyOfRange(pathSegments, 0, nextDepth));
+    if (nextDepth == pathSegments.length) {
       return type;
     } else {
       Preconditions.checkState(!type.isPrimitive());
-      return new GroupType(type.getRepetition(), type.getName(), getType(pathSegments, depth + 1, schema));
+      return Types.buildGroup(type.getRepetition())
+          .as(type.getOriginalType())
+          .addField(getType(pathSegments, nextDepth, schema))
+          .named(type.getName());
     }
   }
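
The getType() change above is the originalType-preservation part of the fix: rebuilding a
projected group with the bare GroupType constructor drops the logical-type annotation, so a
projected LIST column used to reach the converter as a plain group. A small standalone
sketch of the difference (hypothetical field names):

    import org.apache.parquet.schema.GroupType;
    import org.apache.parquet.schema.OriginalType;
    import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
    import org.apache.parquet.schema.Type;
    import org.apache.parquet.schema.Type.Repetition;
    import org.apache.parquet.schema.Types;

    public class OriginalTypeDemo {
      public static void main(String[] args) {
        GroupType source = Types.buildGroup(Repetition.OPTIONAL)
            .as(OriginalType.LIST)
            .repeated(PrimitiveTypeName.INT32).named("element")
            .named("arr");

        // Bare constructor: the LIST annotation is lost (prints null)
        GroupType stripped = new GroupType(source.getRepetition(), source.getName(), source.getFields());
        System.out.println(stripped.getOriginalType());

        // Builder with as(...): the annotation is carried over (prints LIST)
        GroupType kept = Types.buildGroup(source.getRepetition())
            .as(source.getOriginalType())
            .addFields(source.getFields().toArray(new Type[0]))
            .named(source.getName());
        System.out.println(kept.getOriginalType());
      }
    }
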
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetRecordMaterializer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetRecordMaterializer.java
index 04f2d8d..9d746e5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetRecordMaterializer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetRecordMaterializer.java
@@ -38,7 +38,7 @@ public class DrillParquetRecordMaterializer extends RecordMaterializer<Void> {
                                         Collection<SchemaPath> columns, OptionManager options,
                                         ParquetReaderUtility.DateCorruptionStatus containsCorruptedDates) {
     writer = new VectorContainerWriter(mutator);
-    root = new DrillParquetGroupConverter(mutator, writer.rootAsMap(), schema, columns, options, containsCorruptedDates);
+    root = new DrillParquetGroupConverter(mutator, writer.rootAsMap(), schema, columns, options, containsCorruptedDates, false, "");
   }
 
   public void setPosition(int position) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet2/TestDrillParquetReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet2/TestDrillParquetReader.java
index 4b5ed7a..957e39e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet2/TestDrillParquetReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet2/TestDrillParquetReader.java
@@ -24,15 +24,21 @@ import java.time.LocalDateTime;
 import java.time.ZoneOffset;
 import java.time.ZonedDateTime;
 import java.util.Arrays;
+import java.util.Collections;
 
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
+import org.apache.drill.exec.util.Text;
 import org.apache.drill.test.BaseTestQuery;
 import org.joda.time.Period;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import static java.util.Arrays.asList;
+import static java.util.Collections.emptyList;
+import static org.apache.drill.exec.expr.fn.impl.DateUtility.parseLocalDate;
+
 public class TestDrillParquetReader extends BaseTestQuery {
   // enable decimal data type and make sure DrillParquetReader is used to handle test queries
   @BeforeClass
@@ -447,4 +453,853 @@ public class TestDrillParquetReader extends BaseTestQuery {
         .go();
   }
 
+  @Test
+  public void hiveIntArray() throws Exception {
+    // Nesting 0: reading ARRAY<INT>
+    testBuilder()
+        .sqlQuery("SELECT int_arr_n_0 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("int_arr_n_0")
+        .baselineValuesForSingleColumn(asList(-1, 0, 1))
+        .baselineValuesForSingleColumn(emptyList())
+        .baselineValuesForSingleColumn(Collections.singletonList(100500))
+        .go();
+
+    // Nesting 1: reading ARRAY<ARRAY<INT>>
+    testBuilder()
+        .sqlQuery("SELECT int_arr_n_1 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("int_arr_n_1")
+        .baselineValuesForSingleColumn(asList(asList(-1, 0, 1), asList(-2, 1)))
+        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
+        .baselineValuesForSingleColumn(asList(asList(100500, 500100)))
+        .go();
+
+    // Nesting 2: reading ARRAY<ARRAY<ARRAY<INT>>>
+    testBuilder()
+        .sqlQuery("SELECT int_arr_n_2 FROM cp.`parquet2/hive_arrays_p.parquet` order by rid")
+        .ordered()
+        .baselineColumns("int_arr_n_2")
+        .baselineValues(asList(
+            asList(asList(7, 81), asList(-92, 54, -83), asList(-10, -59)),
+            asList(asList(-43, -80)),
+            asList(asList(-70, -62))
+        ))
+        .baselineValues(asList(
+            asList(asList(34, -18)),
+            asList(asList(-87, 87), asList(52, 58), asList(58, 20, -81), asList(-94, -93))
+        ))
+        .baselineValues(asList(
+            asList(asList(-56, 9), asList(39, 5)),
+            asList(asList(28, 88, -28))
+        ))
+        .go();
+  }
+
+  @Test
+  public void hiveBooleanArray() throws Exception {
+    // Nesting 0: reading ARRAY<BOOLEAN>
+    testBuilder()
+        .sqlQuery("SELECT boolean_arr_n_0 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("boolean_arr_n_0")
+        .baselineValuesForSingleColumn(asList(false, true, false, true, false))
+        .baselineValuesForSingleColumn(emptyList())
+        .baselineValuesForSingleColumn(Collections.singletonList(true))
+        .go();
+
+    // Nesting 1: reading ARRAY<ARRAY<BOOLEAN>>
+    testBuilder()
+        .sqlQuery("SELECT boolean_arr_n_1 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("boolean_arr_n_1")
+        .baselineValuesForSingleColumn(asList(asList(true, false, true), asList(false, false)))
+        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
+        .baselineValuesForSingleColumn(asList(asList(false, true)))
+        .go();
+
+    // Nesting 2: reading ARRAY<ARRAY<ARRAY<BOOLEAN>>>
+    testBuilder()
+        .sqlQuery("SELECT boolean_arr_n_2 FROM cp.`parquet2/hive_arrays_p.parquet` order by rid")
+        .ordered()
+        .baselineColumns("boolean_arr_n_2")
+        .baselineValues(asList(
+            asList(asList(false, true)),
+            asList(asList(true), asList(false, true), asList(true), asList(true)),
+            asList(asList(false), asList(true, false, false), asList(true, true), asList(false, true, false)),
+            asList(asList(false, true), asList(true, false), asList(true, false, true)),
+            asList(asList(false), asList(false), asList(false))
+        ))
+        .baselineValues(asList(
+            asList(asList(false, true), asList(false), asList(false, false), asList(true, true, true), asList(false)),
+            asList(asList(false, false, true)),
+            asList(asList(false, true), asList(true, false))
+        ))
+        .baselineValues(asList(
+            asList(asList(true, true), asList(false, true, false), asList(true), asList(true, true, false)),
+            asList(asList(false), asList(false, true), asList(false), asList(false)),
+            asList(asList(true, true, true), asList(true, true, true), asList(false), asList(false)),
+            asList(asList(false, false))
+        ))
+        .go();
+  }
+
+  @Test
+  public void hiveCharArray() throws Exception {
+    // Nesting 0: reading ARRAY<CHAR(2)>
+    testBuilder()
+        .sqlQuery("SELECT char_arr_n_0 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("char_arr_n_0")
+        .baselineValuesForSingleColumn(asList(new Text("aa"), new Text("cc"), new Text("ot")))
+        .baselineValuesForSingleColumn(emptyList())
+        .baselineValuesForSingleColumn(asList(new Text("+a"), new Text("-c"), new Text("*t")))
+        .go();
+
+    // Nesting 1: reading ARRAY<ARRAY<CHAR(2)>>
+    testBuilder()
+        .sqlQuery("SELECT char_arr_n_1 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("char_arr_n_1")
+        .baselineValuesForSingleColumn(asList(
+            asList(new Text("aa")),
+            asList(new Text("cc"), new Text("ot"))))
+        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
+        .baselineValuesForSingleColumn(asList(asList(new Text("*t"))))
+        .go();
+
+    // Nesting 2: reading ARRAY<ARRAY<ARRAY<CHAR(2)>>>
+    testBuilder()
+        .sqlQuery("SELECT char_arr_n_2 FROM cp.`parquet2/hive_arrays_p.parquet` order by rid")
+        .ordered()
+        .baselineColumns("char_arr_n_2")
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(new Text("eT"))//[0][0]
+                ),
+                asList( // [1]
+                    asList(new Text("w9"), new Text("fC"), new Text("ww")),//[1][0]
+                    asList(new Text("3o"), new Text("f7"), new Text("Za")),//[1][1]
+                    asList(new Text("lX"), new Text("iv"), new Text("jI"))//[1][2]
+                ),
+                asList( // [2]
+                    asList(new Text("S3"), new Text("Qa"), new Text("aG")),//[2][0]
+                    asList(new Text("bj"), new Text("gc"), new Text("NO"))//[2][1]
+                )
+            )
+        )
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(new Text("PV"), new Text("tH"), new Text("B7")),//[0][0]
+                    asList(new Text("uL")),//[0][1]
+                    asList(new Text("7b"), new Text("uf")),//[0][2]
+                    asList(new Text("zj")),//[0][3]
+                    asList(new Text("sA"), new Text("hf"), new Text("hR"))//[0][4]
+                )
+            )
+        )
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(new Text("W1"), new Text("FS")),//[0][0]
+                    asList(new Text("le"), new Text("c0")),//[0][1]
+                    asList(new Text(""), new Text("0v"))//[0][2]
+                ),
+                asList( // [1]
+                    asList(new Text("gj"))//[1][0]
+                )
+            )
+        )
+        .go();
+  }
+
+  @Test
+  public void hiveBigintArray() throws Exception {
+    // Nesting 0: reading ARRAY<BIGINT>
+    testBuilder()
+        .sqlQuery("SELECT bigint_arr_n_0 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("bigint_arr_n_0")
+        .baselineValuesForSingleColumn(asList(-9223372036854775808L, 0L, 10000000010L, 9223372036854775807L))
+        .baselineValuesForSingleColumn(emptyList())
+        .baselineValuesForSingleColumn(Collections.singletonList(10005000L))
+        .go();
+
+    // Nesting 1: reading ARRAY<ARRAY<BIGINT>>
+    testBuilder()
+        .sqlQuery("SELECT bigint_arr_n_1 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("bigint_arr_n_1")
+        .baselineValuesForSingleColumn(asList(
+            asList(-9223372036854775808L, 0L, 10000000010L),
+            asList(9223372036854775807L, 9223372036854775807L)))
+        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
+        .baselineValuesForSingleColumn(asList(asList(10005000L, 100050010L)))
+        .go();
+
+    // Nesting 2: reading ARRAY<ARRAY<ARRAY<BIGINT>>>
+    testBuilder()
+        .sqlQuery("SELECT bigint_arr_n_2 FROM cp.`parquet2/hive_arrays_p.parquet` order by rid")
+        .ordered()
+        .baselineColumns("bigint_arr_n_2")
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(7345032157033769004L),//[0][0]
+                    asList(-2306607274383855051L, 3656249581579032003L)//[0][1]
+                ),
+                asList( // [1]
+                    asList(6044100897358387146L, 4737705104728607904L)//[1][0]
+                )
+            )
+        )
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(4833583793282587107L, -8917877693351417844L, -3226305034926780974L)//[0][0]
+                )
+            )
+        )
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(8679405200896733338L, 8581721713860760451L, 1150622751848016114L),//[0][0]
+                    asList(-6672104994192826124L, 4807952216371616134L),//[0][1]
+                    asList(-7874492057876324257L)//[0][2]
+                ),
+                asList( // [1]
+                    asList(8197656735200560038L),//[1][0]
+                    asList(7643173300425098029L, -3186442699228156213L, -8370345321491335247L),//[1][1]
+                    asList(8781633305391982544L, -7187468334864189662L)//[1][2]
+                ),
+                asList( // [2]
+                    asList(6685428436181310098L),//[2][0]
+                    asList(1358587806266610826L),//[2][1]
+                    asList(-2077124879355227614L, -6787493227661516341L),//[2][2]
+                    asList(3713296190482954025L, -3890396613053404789L),//[2][3]
+                    asList(4636761050236625699L, 5268453104977816600L)//[2][4]
+                )
+            )
+        )
+        .go();
+  }
+
+  @Test
+  public void hiveDateArray() throws Exception {
+
+    // Nesting 0: reading ARRAY<DATE>
+    testBuilder()
+        .sqlQuery("SELECT date_arr_n_0 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("date_arr_n_0")
+        .baselineValuesForSingleColumn(asList(
+            parseLocalDate("2018-10-21"),
+            parseLocalDate("2017-07-11"),
+            parseLocalDate("2018-09-23")))
+        .baselineValuesForSingleColumn(emptyList())
+        .baselineValuesForSingleColumn(asList(parseLocalDate("2018-07-14")))
+        .go();
+
+    // Nesting 1: reading ARRAY<ARRAY<DATE>>
+    testBuilder()
+        .sqlQuery("SELECT date_arr_n_1 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("date_arr_n_1")
+        .baselineValuesForSingleColumn(asList(
+            asList(parseLocalDate("2017-03-21"), parseLocalDate("2017-09-10"), parseLocalDate("2018-01-17")),
+            asList(parseLocalDate("2017-03-24"), parseLocalDate("2018-09-22"))))
+        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
+        .baselineValuesForSingleColumn(asList(asList(parseLocalDate("2017-08-09"), parseLocalDate("2017-08-28"))))
+        .go();
+
+    // Nesting 2: reading ARRAY<ARRAY<ARRAY<DATE>>>
+    testBuilder()
+        .sqlQuery("SELECT date_arr_n_2 FROM cp.`parquet2/hive_arrays_p.parquet` order by rid")
+        .ordered()
+        .baselineColumns("date_arr_n_2")
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(parseLocalDate("1952-08-24")),//[0][0]
+                    asList(parseLocalDate("1968-10-05"), parseLocalDate("1951-07-27")),//[0][1]
+                    asList(parseLocalDate("1943-11-18"), parseLocalDate("1991-04-27"))//[0][2]
+                ),
+                asList( // [1]
+                    asList(parseLocalDate("1981-12-27"), parseLocalDate("1984-02-03")),//[1][0]
+                    asList(parseLocalDate("1953-04-15"), parseLocalDate("2002-08-15"), parseLocalDate("1926-12-10")),//[1][1]
+                    asList(parseLocalDate("2009-08-09"), parseLocalDate("1919-08-30"), parseLocalDate("1906-04-10")),//[1][2]
+                    asList(parseLocalDate("1995-10-28"), parseLocalDate("1989-09-07")),//[1][3]
+                    asList(parseLocalDate("2002-01-03"), parseLocalDate("1929-03-17"), parseLocalDate("1939-10-23"))//[1][4]
+                )
+            )
+        )
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(parseLocalDate("1936-05-05"), parseLocalDate("1941-04-12"), parseLocalDate("1914-04-15"))//[0][0]
+                ),
+                asList( // [1]
+                    asList(parseLocalDate("1944-05-09"), parseLocalDate("2002-02-11"))//[1][0]
+                )
+            )
+        )
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(parseLocalDate("1965-04-18"), parseLocalDate("2012-11-07"), parseLocalDate("1961-03-15")),//[0][0]
+                    asList(parseLocalDate("1922-05-22"), parseLocalDate("1978-03-25")),//[0][1]
+                    asList(parseLocalDate("1935-05-29"))//[0][2]
+                ),
+                asList( // [1]
+                    asList(parseLocalDate("1904-07-08"), parseLocalDate("1968-05-23"), parseLocalDate("1946-03-31")),//[1][0]
+                    asList(parseLocalDate("2014-01-28")),//[1][1]
+                    asList(parseLocalDate("1938-09-20"), parseLocalDate("1920-07-09"), parseLocalDate("1990-12-31")),//[1][2]
+                    asList(parseLocalDate("1984-07-20"), parseLocalDate("1988-11-25")),//[1][3]
+                    asList(parseLocalDate("1941-12-21"), parseLocalDate("1939-01-16"), parseLocalDate("2012-09-19"))//[1][4]
+                ),
+                asList( // [2]
+                    asList(parseLocalDate("2020-12-28")),//[2][0]
+                    asList(parseLocalDate("1930-11-13")),//[2][1]
+                    asList(parseLocalDate("2014-05-02"), parseLocalDate("1935-02-16"), parseLocalDate("1919-01-17")),//[2][2]
+                    asList(parseLocalDate("1972-04-20"), parseLocalDate("1951-05-30"), parseLocalDate("1963-01-11"))//[2][3]
+                ),
+                asList( // [3]
+                    asList(parseLocalDate("1993-03-20"), parseLocalDate("1978-12-31")),//[3][0]
+                    asList(parseLocalDate("1965-12-15"), parseLocalDate("1970-09-02"), parseLocalDate("2010-05-25"))//[3][1]
+                )
+            )
+        )
+        .go();
+  }
+
+  @Test
+  public void hiveDecimalArray() throws Exception {
+    // Nesting 0: reading ARRAY<DECIMAL(9,3)>
+    testBuilder()
+        .sqlQuery("SELECT decimal_arr_n_0 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("decimal_arr_n_0")
+        .baselineValuesForSingleColumn(asList(new BigDecimal("-100000.000"), new BigDecimal("102030.001"), new BigDecimal("0.001")))
+        .baselineValuesForSingleColumn(emptyList())
+        .baselineValuesForSingleColumn(Collections.singletonList(new BigDecimal("-10.500")))
+        .go();
+
+    // Nesting 1: reading ARRAY<ARRAY<DECIMAL(9,3)>>
+    testBuilder()
+        .sqlQuery("SELECT decimal_arr_n_1 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("decimal_arr_n_1")
+        .baselineValuesForSingleColumn(asList(
+            asList(new BigDecimal("-100000.000"), new BigDecimal("102030.001")),
+            asList(new BigDecimal("0.101"), new BigDecimal("0.102")),
+            asList(new BigDecimal("0.001"), new BigDecimal("327670.001"))))
+        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
+        .baselineValuesForSingleColumn(asList(asList(new BigDecimal("10.500"), new BigDecimal("5.010"))))
+        .go();
+
+    // Nesting 2: reading ARRAY<ARRAY<ARRAY<DECIMAL(9,3)>>>
+    testBuilder()
+        .sqlQuery("SELECT decimal_arr_n_2 FROM cp.`parquet2/hive_arrays_p.parquet` order by rid")
+        .ordered()
+        .baselineColumns("decimal_arr_n_2")
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(new BigDecimal("9.453")),//[0][0]
+                    asList(new BigDecimal("8.233"), new BigDecimal("-146577.465")),//[0][1]
+                    asList(new BigDecimal("-911144.423"), new BigDecimal("-862766.866"), new BigDecimal("-129948.784"))//[0][2]
+                ),
+                asList( // [1]
+                    asList(new BigDecimal("931346.867"))//[1][0]
+                ),
+                asList( // [2]
+                    asList(new BigDecimal("81.750")),//[2][0]
+                    asList(new BigDecimal("587225.077"), new BigDecimal("-3.930")),//[2][1]
+                    asList(new BigDecimal("0.042")),//[2][2]
+                    asList(new BigDecimal("-342346.511"))//[2][3]
+                )
+            )
+        )
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(new BigDecimal("375098.406"), new BigDecimal("84.509")),//[0][0]
+                    asList(new BigDecimal("-446325.287"), new BigDecimal("3.671")),//[0][1]
+                    asList(new BigDecimal("286958.380"), new BigDecimal("314821.890"), new BigDecimal("18513.303")),//[0][2]
+                    asList(new BigDecimal("-444023.971"), new BigDecimal("827746.528"), new BigDecimal("-54.986")),//[0][3]
+                    asList(new BigDecimal("-44520.406"))//[0][4]
+                )
+            )
+        )
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(new BigDecimal("906668.849"), new BigDecimal("1.406")),//[0][0]
+                    asList(new BigDecimal("-494177.333"), new BigDecimal("952997.058"))//[0][1]
+                ),
+                asList( // [1]
+                    asList(new BigDecimal("642385.159"), new BigDecimal("369753.830"), new BigDecimal("634889.981")),//[1][0]
+                    asList(new BigDecimal("83970.515"), new BigDecimal("-847315.758"), new BigDecimal("-0.600")),//[1][1]
+                    asList(new BigDecimal("73013.870")),//[1][2]
+                    asList(new BigDecimal("337872.675"), new BigDecimal("375940.114"), new BigDecimal("-2.670")),//[1][3]
+                    asList(new BigDecimal("-7.899"), new BigDecimal("755611.538"))//[1][4]
+                )
+            )
+        )
+        .go();
+  }
+
+  @Test
+  public void hiveDoubleArray() throws Exception {
+    // Nesting 0: reading ARRAY<DOUBLE>
+    testBuilder()
+        .sqlQuery("SELECT double_arr_n_0 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("double_arr_n_0")
+        .baselineValuesForSingleColumn(asList(-13.241563769628, 0.3436367772981237, 9.73366))
+        .baselineValuesForSingleColumn(emptyList())
+        .baselineValuesForSingleColumn(asList(15.581409176959358))
+        .go();
+
+    // Nesting 1: reading ARRAY<ARRAY<DOUBLE>>
+    testBuilder()
+        .sqlQuery("SELECT double_arr_n_1 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("double_arr_n_1")
+        .baselineValuesForSingleColumn(asList(asList(-24.049666910012498, 14.975034200, 1.19975056092457), asList(-2.293376758961259, 80.783)))
+        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
+        .baselineValuesForSingleColumn(asList(asList(0.47745359256854, -0.47745359256854)))
+        .go();
+
+    // Nesting 2: reading ARRAY<ARRAY<ARRAY<DOUBLE>>>
+    testBuilder()
+        .sqlQuery("SELECT double_arr_n_2 FROM cp.`parquet2/hive_arrays_p.parquet` order by rid")
+        .ordered()
+        .baselineColumns("double_arr_n_2")
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(-9.269519394436928),//[0][0]
+                    asList(0.7319990286742192, 55.53357952933713, -4.450389221972496)//[0][1]
+                ),
+                asList( // [1]
+                    asList(0.8453724066773386)//[1][0]
+                )
+            )
+        )
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(-7966.1700155142025, 2519.664646202656),//[0][0]
+                    asList(-0.4584683555041169),//[0][1]
+                    asList(-860.4673046946417, 6.371900064750405, 0.4722917366204724)//[0][2]
+                ),
+                asList( // [1]
+                    asList(-62.76596817199298),//[1][0]
+                    asList(712.7880069076203, -5.14172156610055),//[1][1]
+                    asList(3891.128276893486, -0.5008908018575201)//[1][2]
+                ),
+                asList( // [2]
+                    asList(246.42074787345825, -0.7252828610111548),//[2][0]
+                    asList(-845.6633966327038, -436.5267842528363)//[2][1]
+                ),
+                asList( // [3]
+                    asList(5.177407969462521),//[3][0]
+                    asList(0.10545048230228471, 0.7364424942282094),//[3][1]
+                    asList(-373.3798205258425, -79.65616885610245)//[3][2]
+                ),
+                asList( // [4]
+                    asList(-744.3464669962211, 3.8376055596419754),//[4][0]
+                    asList(5784.252615154324, -4792.10612059247, -2535.4093308546435)//[4][1]
+                )
+            )
+        )
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(0.054727088545119096, 0.3289046600776335, -183.0613955159468)//[0][0]
+                ),
+                asList( // [1]
+                    asList(-1653.1119499932845, 5132.117249049659),//[1][0]
+                    asList(735.8474815185632, -5.4205625353286795),//[1][1]
+                    asList(2.9513430741605107, -7513.09536433704),//[1][2]
+                    asList(1660.4238619967039),//[1][3]
+                    asList(472.7475322920831)//[1][4]
+                )
+            )
+        )
+        .go();
+  }
+
+  @Test
+  public void hiveFloatArray() throws Exception {
+    // Nesting 0: reading ARRAY<FLOAT>
+    testBuilder()
+        .sqlQuery("SELECT float_arr_n_0 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("float_arr_n_0")
+        .baselineValuesForSingleColumn(asList(-32.058f, 94.47389f, 16.107912f))
+        .baselineValuesForSingleColumn(emptyList())
+        .baselineValuesForSingleColumn(Collections.singletonList(25.96484f))
+        .go();
+
+    // Nesting 1: reading ARRAY<ARRAY<FLOAT>>
+    testBuilder()
+        .sqlQuery("SELECT float_arr_n_1 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("float_arr_n_1")
+        .baselineValuesForSingleColumn(asList(asList(-82.399826f, 12.633938f, 86.19402f), asList(-13.03544f, 64.65487f)))
+        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
+        .baselineValuesForSingleColumn(asList(asList(15.259451f, -15.259451f)))
+        .go();
+
+    // Nesting 2: reading ARRAY<ARRAY<ARRAY<FLOAT>>>
+    testBuilder()
+        .sqlQuery("SELECT float_arr_n_2 FROM cp.`parquet2/hive_arrays_p.parquet` order by rid")
+        .ordered()
+        .baselineColumns("float_arr_n_2")
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(-5.6506114f),//[0][0]
+                    asList(26.546333f, 3724.8389f),//[0][1]
+                    asList(-53.65775f, 686.8335f, -0.99032f)//[0][2]
+                )
+            )
+        )
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(29.042528f),//[0][0]
+                    asList(3524.3398f, -8856.58f, 6.8508215f)//[0][1]
+                ),
+                asList( // [1]
+                    asList(-0.73994386f, -2.0008986f),//[1][0]
+                    asList(-9.903006f, -271.26172f),//[1][1]
+                    asList(-131.80347f),//[1][2]
+                    asList(39.721367f, -4870.5444f),//[1][3]
+                    asList(-1.4830998f, -766.3066f, -0.1659732f)//[1][4]
+                ),
+                asList( // [2]
+                    asList(3467.0298f, -240.64255f),//[2][0]
+                    asList(2.4072556f, -85.89145f)//[2][1]
+                )
+            )
+        )
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(-888.68243f, -38.09065f),//[0][0]
+                    asList(-6948.154f, -185.64319f, 0.7401936f),//[0][1]
+                    asList(-705.2718f, -932.4041f)//[0][2]
+                ),
+                asList( // [1]
+                    asList(-2.581712f, 0.28686252f, -0.98652786f),//[1][0]
+                    asList(-57.448563f, -0.0057083773f, -0.21712556f),//[1][1]
+                    asList(-8.076653f, -8149.519f, -7.5968184f),//[1][2]
+                    asList(8.823492f),//[1][3]
+                    asList(-9134.323f, 467.53275f, -59.763447f)//[1][4]
+                ),
+                asList( // [2]
+                    asList(0.33596575f, 6805.2256f, -3087.9531f),//[2][0]
+                    asList(9816.865f, -164.90712f, -1.9071647f)//[2][1]
+                ),
+                asList( // [3]
+                    asList(-0.23883149f),//[3][0]
+                    asList(-5.3763375f, -4.7661624f)//[3][1]
+                ),
+                asList( // [4]
+                    asList(-52.42167f, 247.91452f),//[4][0]
+                    asList(9499.771f),//[4][1]
+                    asList(-0.6549191f, 4340.83f)//[4][2]
+                )
+            )
+        )
+        .go();
+  }
+
+  @Test
+  public void hiveSmallintArray() throws Exception {
+    // Nesting 0: reading ARRAY<SMALLINT>
+    testBuilder()
+        .sqlQuery("SELECT smallint_arr_n_0 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("smallint_arr_n_0")
+        .baselineValuesForSingleColumn(asList(-32768, 0, 32767))
+        .baselineValuesForSingleColumn(emptyList())
+        .baselineValuesForSingleColumn(asList(10500))
+        .go();
+
+    // Nesting 1: reading ARRAY<ARRAY<SMALLINT>>
+    testBuilder()
+        .sqlQuery("SELECT smallint_arr_n_1 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("smallint_arr_n_1")
+        .baselineValuesForSingleColumn(asList(asList(-32768, -32768), asList(0, 0), asList(32767, 32767)))
+        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
+        .baselineValuesForSingleColumn(asList(asList(10500, 5010)))
+        .go();
+
+    // Nesting 2: reading ARRAY<ARRAY<ARRAY<SMALLINT>>>
+    testBuilder()
+        .sqlQuery("SELECT smallint_arr_n_2 FROM cp.`parquet2/hive_arrays_p.parquet` order by rid")
+        .ordered()
+        .baselineColumns("smallint_arr_n_2")
+        .baselineValues(asList(
+            asList(asList(-28752)),
+            asList(asList(17243, 15652), asList(-9684), asList(10176, 18123), asList(-15404, 15420), asList(11136, -19435)),
+            asList(asList(-29634, -12695), asList(4350, -24289, -10889)),
+            asList(asList(13731), asList(27661, -15794, 21784), asList(14341, -4635), asList(1601, -29973), asList(2750, 30373, -11630)),
+            asList(asList(-11383))
+        ))
+        .baselineValues(asList(
+            asList(asList(23860), asList(-27345, 19068), asList(-7174, 286, 14673)),
+            asList(asList(14844, -9087), asList(-25185, 219), asList(26875), asList(-4699), asList(-3853, -15729, 11472)),
+            asList(asList(-29142), asList(-13859), asList(-23073, 31368, -26542)),
+            asList(asList(14914, 14656), asList(4636, 6289))
+        ))
+        .baselineValues(asList(
+            asList(asList(10426, 31865), asList(-19088), asList(-4774), asList(17988)),
+            asList(asList(-6214, -26836, 30715)),
+            asList(asList(-4231), asList(31742, -661), asList(-22842, 4203), asList(18278))
+        ))
+        .go();
+  }
+
+  @Test
+  public void hiveStringArray() throws Exception {
+    // Nesting 0: reading ARRAY<STRING>
+    testBuilder()
+        .sqlQuery("SELECT string_arr_n_0 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("string_arr_n_0")
+        .baselineValuesForSingleColumn(asList(new Text("First Value Of Array"), new Text("komlnp"), new Text("The Last Value")))
+        .baselineValuesForSingleColumn(emptyList())
+        .baselineValuesForSingleColumn(Collections.singletonList(new Text("ABCaBcA-1-2-3")))
+        .go();
+
+    // Nesting 1: reading ARRAY<ARRAY<STRING>>
+    testBuilder()
+        .sqlQuery("SELECT string_arr_n_1 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("string_arr_n_1")
+        .baselineValuesForSingleColumn(asList(asList(new Text("Array 0, Value 0"), new Text("Array 0, Value 1")), asList(new Text("Array 1"))))
+        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
+        .baselineValuesForSingleColumn(asList(asList(new Text("One"))))
+        .go();
+
+    // Nesting 2: reading ARRAY<ARRAY<ARRAY<STRING>>>
+    testBuilder()
+        .sqlQuery("SELECT string_arr_n_2 FROM cp.`parquet2/hive_arrays_p.parquet` order by rid")
+        .ordered()
+        .baselineColumns("string_arr_n_2")
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(new Text("dhMGOr1QVO"), new Text("NZpzBl"), new Text("LC8mjYyOJ7l8dHUpk"))//[0][0]
+                ),
+                asList( // [1]
+                    asList(new Text("JH")),//[1][0]
+                    asList(new Text("aVxgfxAu")),//[1][1]
+                    asList(new Text("fF amN8z8"))//[1][2]
+                ),
+                asList( // [2]
+                    asList(new Text("denwte5R39dSb2PeG"), new Text("Gbosj97RXTvBK1w"), new Text("S3whFvN")),//[2][0]
+                    asList(new Text("2sNbYGQhkt303Gnu"), new Text("rwG"), new Text("SQH766A8XwHg2pTA6a"))//[2][1]
+                ),
+                asList( // [3]
+                    asList(new Text("L"), new Text("khGFDtDluFNoo5hT")),//[3][0]
+                    asList(new Text("b8")),//[3][1]
+                    asList(new Text("Z"))//[3][2]
+                ),
+                asList( // [4]
+                    asList(new Text("DTEuW"), new Text("b0Wt84hIl"), new Text("A1H")),//[4][0]
+                    asList(new Text("h2zXh3Qc"), new Text("NOcgU8"), new Text("RGfVgv2rvDG")),//[4][1]
+                    asList(new Text("Hfn1ov9hB7fZN"), new Text("0ZgCD3"))//[4][2]
+                )
+            )
+        )
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(new Text("nk"), new Text("HA"), new Text("CgAZCxTbTrFWJL3yM")),//[0][0]
+                    asList(new Text("T7fGXYwtBb"), new Text("G6vc")),//[0][1]
+                    asList(new Text("GrwB5j3LBy9")),//[0][2]
+                    asList(new Text("g7UreegD1H97"), new Text("dniQ5Ehhps7c1pBuM"), new Text("S wSNMGj7c")),//[0][3]
+                    asList(new Text("iWTEJS0"), new Text("4F"))//[0][4]
+                ),
+                asList( // [1]
+                    asList(new Text("YpRcC01u6i6KO"), new Text("ujpMrvEfUWfKm"), new Text("2d")),//[1][0]
+                    asList(new Text("2"), new Text("HVDH"), new Text("5Qx Q6W112"))//[1][1]
+                )
+            )
+        )
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(new Text("S8d2vjNu680hSim6iJ")),//[0][0]
+                    asList(new Text("lRLaT9RvvgzhZ3C"), new Text("igSX1CP"), new Text("FFZMwMvAOod8")),//[0][1]
+                    asList(new Text("iBX"), new Text("sG")),//[0][2]
+                    asList(new Text("ChRjuDPz99WeU9"), new Text("2gBBmMUXV9E5E"), new Text(" VkEARI2upO"))//[0][3]
+                ),
+                asList( // [1]
+                    asList(new Text("UgMok3Q5wmd")),//[1][0]
+                    asList(new Text("8Zf9CLfUSWK"), new Text(""), new Text("NZ7v")),//[1][1]
+                    asList(new Text("vQE3I5t26"), new Text("251BeQJue"))//[1][2]
+                ),
+                asList( // [2]
+                    asList(new Text("Rpo8"))//[2][0]
+                ),
+                asList( // [3]
+                    asList(new Text("jj3njyupewOM Ej0pu"), new Text("aePLtGgtyu4aJ5"), new Text("cKHSvNbImH1MkQmw0Cs")),//[3][0]
+                    asList(new Text("VSO5JgI2x7TnK31L5"), new Text("hIub"), new Text("eoBSa0zUFlwroSucU")),//[3][1]
+                    asList(new Text("V8Gny91lT"), new Text("5hBncDZ"))//[3][2]
+                ),
+                asList( // [4]
+                    asList(new Text("Y3"), new Text("StcgywfU"), new Text("BFTDChc")),//[4][0]
+                    asList(new Text("5JNwXc2UHLld7"), new Text("v")),//[4][1]
+                    asList(new Text("9UwBhJMSDftPKuGC")),//[4][2]
+                    asList(new Text("E hQ9NJkc0GcMlB"), new Text("IVND1Xp1Nnw26DrL9"))//[4][3]
+                )
+            )
+        ).go();
+  }
+
+  @Test
+  public void hiveTimestampArray() throws Exception {
+    mockUtcDateTimeZone();
+
+    // Nesting 0: reading ARRAY<TIMESTAMP>
+    testBuilder()
+        .optionSettingQueriesForTestQuery("alter session set `" + ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP + "` = true")
+        .sqlQuery("SELECT timestamp_arr_n_0 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("timestamp_arr_n_0")
+        .baselineValuesForSingleColumn(asList(
+            LocalDateTime.of(2018, 10, 21, 1, 51, 36),
+            LocalDateTime.of(2017, 7, 11, 6, 26, 48),
+            LocalDateTime.of(2018, 9, 23, 0, 2, 33)))
+        .baselineValuesForSingleColumn(emptyList())
+        .baselineValuesForSingleColumn(asList(LocalDateTime.of(2018, 7, 14, 2, 20, 34)))
+        .go();
+
+    // Nesting 1: reading ARRAY<ARRAY<TIMESTAMP>>
+    testBuilder()
+        .optionSettingQueriesForTestQuery("alter session set `" + ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP + "` = true")
+        .sqlQuery("SELECT timestamp_arr_n_1 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("timestamp_arr_n_1")
+        .baselineValuesForSingleColumn(asList(
+            asList(LocalDateTime.of(2017, 3, 21, 10, 52, 33), LocalDateTime.of(2017, 9, 9, 22, 29, 24), LocalDateTime.of(2018, 1, 17, 2, 45, 23)),
+            asList(LocalDateTime.of(2017, 3, 23, 23, 3, 23), LocalDateTime.of(2018, 9, 22, 2, 0, 26))))
+        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
+        .baselineValuesForSingleColumn(asList(asList(LocalDateTime.of(2017, 8, 9, 5, 26, 8), LocalDateTime.of(2017, 8, 28, 6, 47, 23))))
+        .go();
+
+    // Nesting 2: reading ARRAY<ARRAY<ARRAY<TIMESTAMP>>>
+    testBuilder()
+        .optionSettingQueriesForTestQuery("alter session set `" + ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP + "` = true")
+        .sqlQuery("SELECT timestamp_arr_n_2 FROM cp.`parquet2/hive_arrays_p.parquet` order by rid")
+        .ordered()
+        .baselineColumns("timestamp_arr_n_2")
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(LocalDateTime.of(1929, 1, 8, 17, 31, 47)),//[0][0]
+                    asList(LocalDateTime.of(1968, 7, 2, 12, 13, 55), LocalDateTime.of(1990, 1, 25, 18, 5, 51), LocalDateTime.of(1950, 10, 26, 16, 16, 10)),//[0][1]
+                    asList(LocalDateTime.of(1946, 9, 3, 0, 3, 50), LocalDateTime.of(1987, 3, 29, 7, 27, 5)),//[0][2]
+                    asList(LocalDateTime.of(1979, 11, 29, 6, 1, 14))//[0][3]
+                ),
+                asList( // [1]
+                    asList(LocalDateTime.of(2010, 8, 26, 9, 8, 51), LocalDateTime.of(2012, 2, 5, 0, 34, 22)),//[1][0]
+                    asList(LocalDateTime.of(1955, 2, 24, 16, 45, 33)),//[1][1]
+                    asList(LocalDateTime.of(1994, 6, 19, 6, 33, 56), LocalDateTime.of(1971, 11, 5, 3, 27, 55), LocalDateTime.of(1925, 4, 11, 11, 55, 48)),//[1][2]
+                    asList(LocalDateTime.of(1916, 10, 2, 3, 7, 14), LocalDateTime.of(1995, 4, 11, 15, 5, 51), LocalDateTime.of(1973, 11, 17, 3, 6, 53))//[1][3]
+                ),
+                asList( // [2]
+                    asList(LocalDateTime.of(1929, 12, 19, 14, 49, 8), LocalDateTime.of(1942, 10, 28, 2, 55, 13), LocalDateTime.of(1936, 12, 1, 10, 1, 37)),//[2][0]
+                    asList(LocalDateTime.of(1926, 12, 9, 5, 34, 14), LocalDateTime.of(1971, 7, 23, 12, 1, 0), LocalDateTime.of(2014, 1, 7, 4, 29, 3)),//[2][1]
+                    asList(LocalDateTime.of(2012, 8, 25, 20, 26, 10)),//[2][2]
+                    asList(LocalDateTime.of(2010, 3, 4, 6, 31, 54), LocalDateTime.of(1950, 7, 20, 16, 26, 8), LocalDateTime.of(1953, 3, 16, 13, 13, 24))//[2][3]
+                )
+            )
+        )
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(LocalDateTime.of(1904, 12, 9, 22, 37, 10)),//[0][0]
+                    asList(LocalDateTime.of(1994, 4, 12, 20, 6, 7)),//[0][1]
+                    asList(LocalDateTime.of(1954, 7, 5, 20, 48, 9), LocalDateTime.of(1913, 3, 3, 16, 45, 10), LocalDateTime.of(1960, 4, 30, 19, 35, 28)),//[0][2]
+                    asList(LocalDateTime.of(1962, 9, 26, 14, 11, 12), LocalDateTime.of(1906, 6, 18, 2, 3, 17), LocalDateTime.of(2003, 6, 19, 2, 15, 24))//[0][3]
+                ),
+                asList( // [1]
+                    asList(LocalDateTime.of(1929, 3, 20, 4, 33, 40), LocalDateTime.of(1939, 2, 12, 4, 3, 7), LocalDateTime.of(1945, 2, 16, 18, 18, 16))//[1][0]
+                ),
+                asList( // [2]
+                    asList(LocalDateTime.of(1969, 8, 11, 19, 25, 31), LocalDateTime.of(1944, 8, 10, 23, 57, 58)),//[2][0]
+                    asList(LocalDateTime.of(1989, 3, 18, 10, 33, 56), LocalDateTime.of(1961, 6, 6, 1, 44, 50))//[2][1]
+                )
+            )
+        )
+        .baselineValuesForSingleColumn(
+            asList( // row
+                asList( // [0]
+                    asList(LocalDateTime.of(1999, 12, 6, 23, 16, 45)),//[0][0]
+                    asList(LocalDateTime.of(1903, 12, 11, 2, 26, 16), LocalDateTime.of(2007, 1, 3, 17, 27, 28)),//[0][1]
+                    asList(LocalDateTime.of(2018, 3, 16, 13, 43, 19), LocalDateTime.of(2002, 9, 16, 5, 58, 40), LocalDateTime.of(1956, 5, 16, 14, 47, 44)),//[0][2]
+                    asList(LocalDateTime.of(2006, 9, 19, 15, 38, 19), LocalDateTime.of(2016, 1, 21, 10, 39, 30))//[0][3]
+                )
+            )
+        )
+        .go();
+  }
+
+  @Test
+  public void hiveTinyintArray() throws Exception {
+    // Nesting 0: reading ARRAY<TINYINT>
+    testBuilder()
+        .sqlQuery("SELECT tinyint_arr_n_0 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("tinyint_arr_n_0")
+        .baselineValuesForSingleColumn(asList(-128, 0, 127))
+        .baselineValuesForSingleColumn(emptyList())
+        .baselineValuesForSingleColumn(asList(-101))
+        .go();
+
+    // Nesting 1: reading ARRAY<ARRAY<TINYINT>>
+    testBuilder()
+        .sqlQuery("SELECT tinyint_arr_n_1 FROM cp.`parquet2/hive_arrays_p.parquet`")
+        .unOrdered()
+        .baselineColumns("tinyint_arr_n_1")
+        .baselineValuesForSingleColumn(asList(asList(-128, -127), asList(0, 1), asList(127, 126)))
+        .baselineValuesForSingleColumn(asList(emptyList(), emptyList()))
+        .baselineValuesForSingleColumn(asList(asList(-102)))
+        .go();
+
+    // Nesting 2: reading ARRAY<ARRAY<ARRAY<TINYINT>>>
+    testBuilder()
+        .sqlQuery("SELECT tinyint_arr_n_2 FROM cp.`parquet2/hive_arrays_p.parquet` order by rid")
+        .ordered()
+        .baselineColumns("tinyint_arr_n_2")
+        .baselineValues(asList(
+            asList(asList(31, 65, 54), asList(66), asList(22), asList(-33, -125, 116)),
+            asList(asList(-5, -10)),
+            asList(asList(78), asList(86), asList(90, 34), asList(32)),
+            asList(asList(103, -49, -33), asList(-30), asList(107, 24, 74), asList(16, -58)),
+            asList(asList(-119, -8), asList(50, -99, 26), asList(-119))
+        ))
+        .baselineValues(asList(
+            asList(asList(-90, -113), asList(71, -65)),
+            asList(asList(88, -83)),
+            asList(asList(11), asList(121, -57)),
+            asList(asList(-79), asList(16, -111, -111), asList(90, 106), asList(33, 29, 42), asList(74))
+        ))
+        .baselineValues(asList(
+            asList(asList(74, -115), asList(19, 85, 3))
+        ))
+        .go();
+  }
+
 }
diff --git a/exec/java-exec/src/test/resources/parquet2/hive_arrays_p.parquet b/exec/java-exec/src/test/resources/parquet2/hive_arrays_p.parquet
new file mode 100755
index 0000000..56767a2
Binary files /dev/null and b/exec/java-exec/src/test/resources/parquet2/hive_arrays_p.parquet differ


[drill] 02/03: DRILL-7297: Query hangs in planning stage when Error is thrown

Posted by am...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

amansinha pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit d1086772ee4d9f027d3fd6f4673aae43158d2172
Author: Volodymyr Vysotskyi <vv...@gmail.com>
AuthorDate: Thu Jun 20 15:33:00 2019 +0300

    DRILL-7297: Query hangs in planning stage when Error is thrown
    
    close apache/drill#1811
---
 .../apache/drill/exec/work/foreman/Foreman.java    |  8 ++---
 .../java/org/apache/drill/TestFunctionsQuery.java  |  8 +++++
 .../exec/fn/impl/testing/CustomErrorFunction.java  | 42 ++++++++++++++++++++++
 3 files changed, 53 insertions(+), 5 deletions(-)

diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
index ce03303..804254b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
@@ -285,9 +285,9 @@ public class Foreman implements Runnable {
         throw new IllegalStateException();
       }
       injector.injectChecked(queryContext.getExecutionControls(), "run-try-end", ForemanException.class);
-    } catch (final ForemanException e) {
+    } catch (ForemanException | UserException e) {
       queryStateProcessor.moveToState(QueryState.FAILED, e);
-    } catch (final OutOfMemoryError | OutOfMemoryException e) {
+    } catch (OutOfMemoryError | OutOfMemoryException e) {
       if (FailureUtils.isDirectMemoryOOM(e)) {
         queryStateProcessor.moveToState(QueryState.FAILED, UserException.memoryError(e).build(logger));
       } else {
@@ -298,9 +298,7 @@ public class Foreman implements Runnable {
          */
         FailureUtils.unrecoverableFailure(e, "Unable to handle out of memory condition in Foreman.", EXIT_CODE_HEAP_OOM);
       }
-    } catch (UserException e) {
-      queryStateProcessor.moveToState(QueryState.FAILED, e);
-    } catch (AssertionError | Exception ex) {
+    } catch (Throwable ex) {
       queryStateProcessor.moveToState(QueryState.FAILED,
           new ForemanException("Unexpected exception during fragment initialization: " + ex.getMessage(), ex));
     } finally {
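
The heart of this fix is the final catch clause widening from (AssertionError | Exception)
to Throwable: a java.lang.Error raised while the query is being set up now also moves the
state machine to FAILED and a response reaches the client, instead of the thread dying
silently and the client waiting forever. The pattern in isolation, with illustrative names
rather than Drill's actual classes:

    // A minimal sketch of the worker-thread pattern, not Drill's implementation.
    public class WorkerSketch implements Runnable {

      @Override
      public void run() {
        try {
          planAndExecute();
        } catch (Throwable t) {
          // Before the fix an Error fell through every catch clause, run() died
          // without reporting failure, and the waiting client hung.
          moveToFailed(t);
        }
      }

      private void planAndExecute() {
        throw new Error("Error from UDF"); // e.g. thrown while evaluating a constant UDF call
      }

      private void moveToFailed(Throwable cause) {
        System.err.println("query FAILED: " + cause.getMessage());
      }
    }
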
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsQuery.java b/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsQuery.java
index 16d9789..ae08d92 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsQuery.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsQuery.java
@@ -18,6 +18,7 @@
 package org.apache.drill;
 
 import static org.apache.drill.exec.expr.fn.impl.DateUtility.formatTimeStamp;
+import static org.hamcrest.CoreMatchers.containsString;
 
 import java.math.BigDecimal;
 import java.time.Instant;
@@ -1008,4 +1009,11 @@ public class TestFunctionsQuery extends BaseTestQuery {
           .go();
     }
   }
+
+  @Test // DRILL-7297
+  public void testErrorInUdf() throws Exception {
+    expectedException.expect(UserRemoteException.class);
+    expectedException.expectMessage(containsString("Error from UDF"));
+    test("select error_function()");
+  }
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/testing/CustomErrorFunction.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/testing/CustomErrorFunction.java
new file mode 100644
index 0000000..c372e7b
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/testing/CustomErrorFunction.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.fn.impl.testing;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
+
+@FunctionTemplate(
+    name="error_function",
+    scope = FunctionTemplate.FunctionScope.SIMPLE,
+    nulls = FunctionTemplate.NullHandling.NULL_IF_NULL
+)
+public class CustomErrorFunction implements DrillSimpleFunc {
+
+  @Output
+  VarCharHolder output;
+
+  public void setup() {
+  }
+
+  public void eval() {
+    throw new Error("Error from UDF");
+  }
+}
+
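
Why the Foreman change above works: java.lang.Error and Exception are sibling subclasses of Throwable, so the old catch (AssertionError | Exception) clause never matched a plain Error such as the one this test UDF throws. A minimal standalone sketch (not Drill code) of the difference:

    // Standalone sketch (not Drill code): Error is a sibling of Exception
    // under Throwable, so catch (AssertionError | Exception) -- the old
    // Foreman clauses -- lets a plain Error propagate, while the new
    // catch (Throwable) clause handles it and can fail the query.
    public class CatchErrorDemo {
      public static void main(String[] args) {
        try {
          throw new Error("Error from UDF");
        } catch (AssertionError | Exception e) {
          // Old behavior: never reached for a plain java.lang.Error.
          System.out.println("old clauses handled: " + e);
        } catch (Throwable t) {
          // New behavior: every failure is caught and reported.
          System.out.println("Throwable clause handled: " + t);
        }
      }
    }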


[drill] 03/03: DRILL-7302: Bump Apache Avro to 1.9.0

Posted by am...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

amansinha pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 205e0289940e8b8342f3ae812640da331381ff16
Author: Fokko Driesprong <fo...@apache.org>
AuthorDate: Thu Jun 20 15:41:57 2019 +0200

    DRILL-7302: Bump Apache Avro to 1.9.0
    
    Apache Avro 1.9.0 brings a lot of new features:
    
    - Deprecate Joda-Time in favor of Java 8 JSR-310 (java.time) and set
      it as the default (a short sketch of this change follows the message)
    - Remove support for Hadoop 1.x
    - Move from Jackson 1.x to 2.9
    - Add a Zstandard codec
    - Many dependency updates to fix CVEs
    - Remove Jackson classes from the public API
    - Apache Avro is built by default with Java 8
    - Apache Avro is compiled and tested with Java 11 to guarantee compatibility
    - Apache Avro MapReduce is compiled and tested with Hadoop 3
    - Apache Avro is now leaner: several dependencies were removed (Guava,
      Paranamer, commons-codec, and commons-logging)
    - and many, many more!
    
    close apache/drill#1812
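
A minimal sketch of the most user-visible change, the Joda-Time to java.time switch, assuming the Avro 1.9 TimeConversions API (the schema and values here are illustrative):

    // Minimal sketch of the JSR-310 default: in Avro 1.9 the bundled
    // timestamp-millis conversion maps to java.time.Instant (Avro 1.8
    // shipped Joda-based conversions instead).
    import java.time.Instant;

    import org.apache.avro.LogicalTypes;
    import org.apache.avro.Schema;
    import org.apache.avro.data.TimeConversions;

    public class AvroJsr310Demo {
      public static void main(String[] args) {
        // A long column carrying the timestamp-millis logical type.
        Schema schema = LogicalTypes.timestampMillis()
            .addToSchema(Schema.create(Schema.Type.LONG));

        TimeConversions.TimestampMillisConversion conversion =
            new TimeConversions.TimestampMillisConversion();

        // Millisecond precision so the round trip is exact.
        Instant now = Instant.ofEpochMilli(System.currentTimeMillis());
        Long encoded = conversion.toLong(now, schema, schema.getLogicalType());
        Instant decoded = conversion.fromLong(encoded, schema, schema.getLogicalType());

        System.out.println(now + " -> " + encoded + " -> " + decoded);
      }
    }
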
---
 .../org/apache/drill/exec/resourcemgr/config/selectors/AclSelector.java | 2 +-
 .../drill/exec/resourcemgr/config/selectors/NotEqualSelector.java       | 2 +-
 .../drill/exec/physical/impl/join/TestLateralJoinCorrectness.java       | 2 +-
 .../physical/impl/join/TestLateralJoinCorrectnessBatchProcessing.java   | 2 +-
 .../java/org/apache/drill/exec/physical/impl/scan/ScanTestUtils.java    | 2 +-
 pom.xml                                                                 | 2 +-
 6 files changed, 6 insertions(+), 6 deletions(-)
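
The source-file changes below share one cause: Avro 1.9 no longer ships a shaded copy of Guava, so the avro.shaded.com.google.common imports these classes had picked up stop resolving, and they switch to Drill's own relocated Guava. A minimal sketch of the corrected import (class name and values are illustrative):

    // Minimal sketch of the corrected import: Drill's relocated Guava
    // replaces the avro.shaded package that disappears with Avro 1.9.
    import org.apache.drill.shaded.guava.com.google.common.collect.Lists;

    import java.util.List;

    public class ShadedGuavaDemo {
      public static void main(String[] args) {
        // Same Guava API, just under Drill's shaded package prefix.
        List<String> selectors = Lists.newArrayList("AclSelector", "NotEqualSelector");
        System.out.println(selectors);
      }
    }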

diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/resourcemgr/config/selectors/AclSelector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/resourcemgr/config/selectors/AclSelector.java
index 50acb86..9c6d7c5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/resourcemgr/config/selectors/AclSelector.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/resourcemgr/config/selectors/AclSelector.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.resourcemgr.config.selectors;
 
-import avro.shaded.com.google.common.annotations.VisibleForTesting;
+import org.apache.drill.shaded.guava.com.google.common.annotations.VisibleForTesting;
 import com.typesafe.config.Config;
 import org.apache.drill.exec.ops.QueryContext;
 import org.apache.drill.exec.resourcemgr.config.exception.RMConfigException;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/resourcemgr/config/selectors/NotEqualSelector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/resourcemgr/config/selectors/NotEqualSelector.java
index 653c3b5..bb6da02 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/resourcemgr/config/selectors/NotEqualSelector.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/resourcemgr/config/selectors/NotEqualSelector.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.resourcemgr.config.selectors;
 
-import avro.shaded.com.google.common.annotations.VisibleForTesting;
+import org.apache.drill.shaded.guava.com.google.common.annotations.VisibleForTesting;
 import com.typesafe.config.Config;
 import org.apache.drill.exec.ops.QueryContext;
 import org.apache.drill.exec.resourcemgr.config.exception.RMConfigException;
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestLateralJoinCorrectness.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestLateralJoinCorrectness.java
index 4d4521d..ff1de9d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestLateralJoinCorrectness.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestLateralJoinCorrectness.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.physical.impl.join;
 
-import avro.shaded.com.google.common.collect.Lists;
+import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
 import org.apache.calcite.rel.core.JoinRelType;
 import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.common.exceptions.DrillRuntimeException;
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestLateralJoinCorrectnessBatchProcessing.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestLateralJoinCorrectnessBatchProcessing.java
index 9eee038..ae9003c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestLateralJoinCorrectnessBatchProcessing.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestLateralJoinCorrectnessBatchProcessing.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.physical.impl.join;
 
-import avro.shaded.com.google.common.collect.Lists;
+import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
 import org.apache.calcite.rel.core.JoinRelType;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.exec.ops.OperatorContext;
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/ScanTestUtils.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/ScanTestUtils.java
index de7b6c2..1e99b79 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/ScanTestUtils.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/ScanTestUtils.java
@@ -42,7 +42,7 @@ import org.apache.drill.exec.record.metadata.TupleSchema;
 import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;
 import org.apache.drill.test.OperatorFixture;
 
-import avro.shaded.com.google.common.collect.Lists;
+import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
 
 public class ScanTestUtils {
 
diff --git a/pom.xml b/pom.xml
index e141ea0..550d8ff 100644
--- a/pom.xml
+++ b/pom.xml
@@ -83,7 +83,7 @@
     <javassist.version>3.24.0-GA</javassist.version>
     <msgpack.version>0.6.6</msgpack.version>
     <reflections.version>0.9.10</reflections.version>
-    <avro.version>1.8.2</avro.version>
+    <avro.version>1.9.0</avro.version>
     <metrics.version>4.0.2</metrics.version>
     <jetty.version>9.3.25.v20180904</jetty.version>
     <jersey.version>2.25.1</jersey.version>