Posted to commits@parquet.apache.org by zi...@apache.org on 2019/05/01 14:14:24 UTC

[parquet-mr] branch master updated: PARQUET-1558: Use try-with-resource in Apache Avro tests (#634)

This is an automated email from the ASF dual-hosted git repository.

zivanfi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/parquet-mr.git


The following commit(s) were added to refs/heads/master by this push:
     new 12f3fd2  PARQUET-1558: Use try-with-resource in Apache Avro tests (#634)
12f3fd2 is described below

commit 12f3fd2c3e8349f54ffc8814f083c8b001701f25
Author: Fokko Driesprong <fo...@driesprong.frl>
AuthorDate: Wed May 1 16:14:19 2019 +0200

    PARQUET-1558: Use try-with-resource in Apache Avro tests (#634)
    
    We can use the try-with-resources pattern to implicitly close
    resources such as readers and writers provided by Avro and Parquet.
---
 .../apache/parquet/avro/TestInputOutputFormat.java |  20 +-
 .../org/apache/parquet/avro/TestReadWrite.java     | 247 ++++++++++-----------
 .../parquet/avro/TestReadWriteOldListBehavior.java | 133 +++++------
 .../parquet/avro/TestReflectInputOutputFormat.java |  70 +++---
 .../apache/parquet/avro/TestReflectReadWrite.java  |  32 +--
 .../avro/TestSpecificInputOutputFormat.java        |  70 +++---
 .../apache/parquet/avro/TestSpecificReadWrite.java | 141 ++++++------
 .../apache/parquet/avro/TestStringBehavior.java    |  77 ++-----
 8 files changed, 378 insertions(+), 412 deletions(-)
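
For readers unfamiliar with the pattern: any object implementing
java.lang.AutoCloseable may be declared in a try header, and its close()
method runs automatically on every exit path. The Parquet writers and
readers used in these tests are Closeable, so the explicit close() calls
can be dropped. A minimal, self-contained sketch of the shape the diffs
below converge on (the schema, file location, and record are
illustrative, not taken from the tests):

    import org.apache.avro.Schema;
    import org.apache.avro.SchemaBuilder;
    import org.apache.avro.generic.GenericRecord;
    import org.apache.avro.generic.GenericRecordBuilder;
    import org.apache.hadoop.fs.Path;
    import org.apache.parquet.avro.AvroParquetReader;
    import org.apache.parquet.avro.AvroParquetWriter;
    import org.apache.parquet.hadoop.ParquetReader;
    import org.apache.parquet.hadoop.ParquetWriter;

    public class TryWithResourcesSketch {
      public static void main(String[] args) throws Exception {
        Schema schema = SchemaBuilder.record("Example").fields()
            .requiredString("value").endRecord();
        Path file = new Path("/tmp/example.parquet"); // illustrative location

        // The writer is closed when the block exits, even if write() throws.
        try (ParquetWriter<GenericRecord> writer = AvroParquetWriter
            .<GenericRecord>builder(file)
            .withSchema(schema)
            .build()) {
          writer.write(new GenericRecordBuilder(schema)
              .set("value", "theValue").build());
        }

        // The same applies to the reader.
        try (ParquetReader<GenericRecord> reader = AvroParquetReader
            .<GenericRecord>builder(file)
            .build()) {
          GenericRecord record;
          while ((record = reader.read()) != null) {
            System.out.println(record);
          }
        }
      }
    }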

diff --git a/parquet-avro/src/test/java/org/apache/parquet/avro/TestInputOutputFormat.java b/parquet-avro/src/test/java/org/apache/parquet/avro/TestInputOutputFormat.java
index 7ba6c9b..6b1c6d3 100644
--- a/parquet-avro/src/test/java/org/apache/parquet/avro/TestInputOutputFormat.java
+++ b/parquet-avro/src/test/java/org/apache/parquet/avro/TestInputOutputFormat.java
@@ -117,17 +117,17 @@ public class TestInputOutputFormat {
       waitForJob(job);
     }
 
-    final BufferedReader out = new BufferedReader(new FileReader(new File(outputPath.toString(), "part-m-00000")));
-    String lineOut = null;
-    int lineNumber = 0;
-    while ((lineOut = out.readLine()) != null) {
-      lineOut = lineOut.substring(lineOut.indexOf("\t") + 1);
-      GenericRecord a = nextRecord(lineNumber == 4 ? null : lineNumber);
-      assertEquals("line " + lineNumber, a.toString(), lineOut);
-      ++ lineNumber;
+    try(final BufferedReader out = new BufferedReader(new FileReader(new File(outputPath.toString(), "part-m-00000")))) {
+      String lineOut;
+      int lineNumber = 0;
+      while ((lineOut = out.readLine()) != null) {
+        lineOut = lineOut.substring(lineOut.indexOf("\t") + 1);
+        GenericRecord a = nextRecord(lineNumber == 4 ? null : lineNumber);
+        assertEquals("line " + lineNumber, a.toString(), lineOut);
+        ++lineNumber;
+      }
+      assertNull("line " + lineNumber, out.readLine());
     }
-    assertNull("line " + lineNumber, out.readLine());
-    out.close();
   }
 
   private void waitForJob(Job job) throws Exception {
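
Beyond brevity, the rewrite above changes failure behavior: in the old
code an exception from readLine() skipped out.close() and leaked the
file handle. With try-with-resources, close() runs on every path, and if
both the body and close() throw, the body's exception propagates with
close()'s attached as a suppressed exception. A JDK-only sketch of those
semantics:

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.StringReader;

    public class SuppressedCloseDemo {
      public static void main(String[] args) {
        try {
          try (BufferedReader r = new BufferedReader(new StringReader("a\nb")) {
            @Override
            public void close() throws IOException {
              super.close();
              throw new IOException("close failed");
            }
          }) {
            r.readLine();
            throw new IllegalStateException("body failed");
          }
        } catch (IllegalStateException e) {
          // The body's exception wins; the one from close() is suppressed.
          for (Throwable s : e.getSuppressed()) {
            System.out.println("suppressed: " + s.getMessage());
          }
        }
      }
    }
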
diff --git a/parquet-avro/src/test/java/org/apache/parquet/avro/TestReadWrite.java b/parquet-avro/src/test/java/org/apache/parquet/avro/TestReadWrite.java
index 4368938..396b8a4 100644
--- a/parquet-avro/src/test/java/org/apache/parquet/avro/TestReadWrite.java
+++ b/parquet-avro/src/test/java/org/apache/parquet/avro/TestReadWrite.java
@@ -117,19 +117,19 @@ public class TestReadWrite {
         Resources.getResource("map.avsc").openStream());
 
     Path file = new Path(createTempFile().getPath());
+    ImmutableMap<String, Integer> emptyMap = new ImmutableMap.Builder<String, Integer>().build();
 
-    ParquetWriter<GenericRecord> writer = AvroParquetWriter
+    try(ParquetWriter<GenericRecord> writer = AvroParquetWriter
         .<GenericRecord>builder(file)
         .withSchema(schema)
         .withConf(testConf)
-        .build();
+        .build()) {
 
-    // Write a record with an empty map.
-    ImmutableMap emptyMap = new ImmutableMap.Builder<String, Integer>().build();
-    GenericData.Record record = new GenericRecordBuilder(schema)
+      // Write a record with an empty map.
+      GenericData.Record record = new GenericRecordBuilder(schema)
         .set("mymap", emptyMap).build();
-    writer.write(record);
-    writer.close();
+      writer.write(record);
+    }
 
     AvroParquetReader<GenericRecord> reader = new AvroParquetReader<GenericRecord>(testConf, file);
     GenericRecord nextRecord = reader.read();
@@ -145,22 +145,22 @@ public class TestReadWrite {
 
     Path file = new Path(createTempFile().getPath());
 
-    ParquetWriter<GenericRecord> writer = AvroParquetWriter
-        .<GenericRecord>builder(file)
-        .withSchema(schema)
-        .withConf(testConf)
-        .build();
-
     // Write a record with a null value
-    Map<CharSequence, Integer> map = new HashMap<CharSequence, Integer>();
+    Map<CharSequence, Integer> map = new HashMap<>();
     map.put(str("thirty-four"), 34);
     map.put(str("eleventy-one"), null);
     map.put(str("one-hundred"), 100);
 
-    GenericData.Record record = new GenericRecordBuilder(schema)
+    try(ParquetWriter<GenericRecord> writer = AvroParquetWriter
+        .<GenericRecord>builder(file)
+        .withSchema(schema)
+        .withConf(testConf)
+        .build()) {
+
+      GenericData.Record record = new GenericRecordBuilder(schema)
         .set("mymap", map).build();
-    writer.write(record);
-    writer.close();
+      writer.write(record);
+    }
 
     AvroParquetReader<GenericRecord> reader = new AvroParquetReader<GenericRecord>(testConf, file);
     GenericRecord nextRecord = reader.read();
@@ -177,21 +177,22 @@ public class TestReadWrite {
 
     Path file = new Path(createTempFile().getPath());
 
-    ParquetWriter<GenericRecord> writer = AvroParquetWriter
+    try(ParquetWriter<GenericRecord> writer = AvroParquetWriter
         .<GenericRecord>builder(file)
         .withSchema(schema)
         .withConf(testConf)
-        .build();
+        .build()) {
 
-    // Write a record with a null value
-    Map<String, Integer> map = new HashMap<String, Integer>();
-    map.put("thirty-four", 34);
-    map.put("eleventy-one", null);
-    map.put("one-hundred", 100);
+      // Write a record with a null value
+      Map<String, Integer> map = new HashMap<String, Integer>();
+      map.put("thirty-four", 34);
+      map.put("eleventy-one", null);
+      map.put("one-hundred", 100);
 
-    GenericData.Record record = new GenericRecordBuilder(schema)
+      GenericData.Record record = new GenericRecordBuilder(schema)
         .set("mymap", map).build();
-    writer.write(record);
+      writer.write(record);
+    }
   }
 
   @Test
@@ -201,18 +202,18 @@ public class TestReadWrite {
 
     Path file = new Path(createTempFile().getPath());
 
-    ParquetWriter<GenericRecord> writer = AvroParquetWriter
+    try(ParquetWriter<GenericRecord> writer = AvroParquetWriter
         .<GenericRecord>builder(file)
         .withSchema(schema)
         .withConf(testConf)
-        .build();
+        .build()) {
 
-    // Write a record with a map with Utf8 keys.
-    GenericData.Record record = new GenericRecordBuilder(schema)
+      // Write a record with a map with Utf8 keys.
+      GenericData.Record record = new GenericRecordBuilder(schema)
         .set("mymap", ImmutableMap.of(new Utf8("a"), 1, new Utf8("b"), 2))
         .build();
-    writer.write(record);
-    writer.close();
+      writer.write(record);
+    }
 
     AvroParquetReader<GenericRecord> reader = new AvroParquetReader<GenericRecord>(testConf, file);
     GenericRecord nextRecord = reader.read();
@@ -239,37 +240,37 @@ public class TestReadWrite {
     File file = temp.newFile("decimal.parquet");
     file.delete();
     Path path = new Path(file.toString());
+    List<GenericRecord> expected = Lists.newArrayList();
 
-    ParquetWriter<GenericRecord> writer = AvroParquetWriter
+    try(ParquetWriter<GenericRecord> writer = AvroParquetWriter
         .<GenericRecord>builder(path)
         .withDataModel(decimalSupport)
         .withSchema(decimalSchema)
-        .build();
+        .build()) {
 
-    Random random = new Random(34L);
-    GenericRecordBuilder builder = new GenericRecordBuilder(decimalSchema);
-    List<GenericRecord> expected = Lists.newArrayList();
-    for (int i = 0; i < 1000; i += 1) {
-      BigDecimal dec = new BigDecimal(new BigInteger(31, random), 2);
-      builder.set("dec", dec);
+      Random random = new Random(34L);
+      GenericRecordBuilder builder = new GenericRecordBuilder(decimalSchema);
+      for (int i = 0; i < 1000; i += 1) {
+        BigDecimal dec = new BigDecimal(new BigInteger(31, random), 2);
+        builder.set("dec", dec);
 
-      GenericRecord rec = builder.build();
-      expected.add(rec);
-      writer.write(builder.build());
+        GenericRecord rec = builder.build();
+        expected.add(rec);
+        writer.write(builder.build());
+      }
     }
-    writer.close();
+    List<GenericRecord> records = Lists.newArrayList();
 
-    ParquetReader<GenericRecord> reader = AvroParquetReader
+    try(ParquetReader<GenericRecord> reader = AvroParquetReader
         .<GenericRecord>builder(path)
         .withDataModel(decimalSupport)
         .disableCompatibility()
-        .build();
-    List<GenericRecord> records = Lists.newArrayList();
-    GenericRecord rec;
-    while ((rec = reader.read()) != null) {
-      records.add(rec);
+        .build()) {
+      GenericRecord rec;
+      while ((rec = reader.read()) != null) {
+        records.add(rec);
+      }
     }
-    reader.close();
 
     Assert.assertTrue("dec field should be a BigDecimal instance",
         records.get(0).get("dec") instanceof BigDecimal);
@@ -291,37 +292,37 @@ public class TestReadWrite {
     File file = temp.newFile("decimal.parquet");
     file.delete();
     Path path = new Path(file.toString());
+    List<GenericRecord> expected = Lists.newArrayList();
 
-    ParquetWriter<GenericRecord> writer = AvroParquetWriter
+    try(ParquetWriter<GenericRecord> writer = AvroParquetWriter
         .<GenericRecord>builder(path)
         .withDataModel(decimalSupport)
         .withSchema(decimalSchema)
-        .build();
+        .build()) {
 
-    Random random = new Random(34L);
-    GenericRecordBuilder builder = new GenericRecordBuilder(decimalSchema);
-    List<GenericRecord> expected = Lists.newArrayList();
-    for (int i = 0; i < 1000; i += 1) {
-      BigDecimal dec = new BigDecimal(new BigInteger(31, random), 2);
-      builder.set("dec", dec);
+      Random random = new Random(34L);
+      GenericRecordBuilder builder = new GenericRecordBuilder(decimalSchema);
+      for (int i = 0; i < 1000; i += 1) {
+        BigDecimal dec = new BigDecimal(new BigInteger(31, random), 2);
+        builder.set("dec", dec);
 
-      GenericRecord rec = builder.build();
-      expected.add(rec);
-      writer.write(builder.build());
+        GenericRecord rec = builder.build();
+        expected.add(rec);
+        writer.write(builder.build());
+      }
     }
-    writer.close();
+    List<GenericRecord> records = Lists.newArrayList();
 
-    ParquetReader<GenericRecord> reader = AvroParquetReader
+    try(ParquetReader<GenericRecord> reader = AvroParquetReader
         .<GenericRecord>builder(path)
         .withDataModel(decimalSupport)
         .disableCompatibility()
-        .build();
-    List<GenericRecord> records = Lists.newArrayList();
-    GenericRecord rec;
-    while ((rec = reader.read()) != null) {
-      records.add(rec);
+        .build()) {
+      GenericRecord rec;
+      while ((rec = reader.read()) != null) {
+        records.add(rec);
+      }
     }
-    reader.close();
 
     Assert.assertTrue("dec field should be a BigDecimal instance",
         records.get(0).get("dec") instanceof BigDecimal);
@@ -334,35 +335,31 @@ public class TestReadWrite {
         Resources.getResource("all.avsc").openStream());
 
     Path file = new Path(createTempFile().getPath());
+    List<Integer> integerArray = Arrays.asList(1, 2, 3);
+    GenericData.Record nestedRecord = new GenericRecordBuilder(
+      schema.getField("mynestedrecord").schema())
+      .set("mynestedint", 1).build();
+    List<Integer> emptyArray = new ArrayList<Integer>();
+    Schema arrayOfOptionalIntegers = Schema.createArray(
+      optional(Schema.create(Schema.Type.INT)));
+    GenericData.Array<Integer> genericIntegerArrayWithNulls =
+      new GenericData.Array<Integer>(
+        arrayOfOptionalIntegers,
+        Arrays.asList(1, null, 2, null, 3));
+    GenericFixed genericFixed = new GenericData.Fixed(
+      Schema.createFixed("fixed", null, null, 1), new byte[]{(byte) 65});
+    ImmutableMap<String, Integer> emptyMap = new ImmutableMap.Builder<String, Integer>().build();
 
-    ParquetWriter<GenericRecord> writer = AvroParquetWriter
+    try(ParquetWriter<GenericRecord> writer = AvroParquetWriter
         .<GenericRecord>builder(file)
         .withSchema(schema)
         .withConf(testConf)
-        .build();
+        .build()) {
 
-    GenericData.Record nestedRecord = new GenericRecordBuilder(
-        schema.getField("mynestedrecord").schema())
-            .set("mynestedint", 1).build();
-
-    List<Integer> integerArray = Arrays.asList(1, 2, 3);
-    GenericData.Array<Integer> genericIntegerArray = new GenericData.Array<Integer>(
+      GenericData.Array<Integer> genericIntegerArray = new GenericData.Array<Integer>(
         Schema.createArray(Schema.create(Schema.Type.INT)), integerArray);
 
-    GenericFixed genericFixed = new GenericData.Fixed(
-        Schema.createFixed("fixed", null, null, 1), new byte[] { (byte) 65 });
-
-    List<Integer> emptyArray = new ArrayList<Integer>();
-    ImmutableMap emptyMap = new ImmutableMap.Builder<String, Integer>().build();
-
-    Schema arrayOfOptionalIntegers = Schema.createArray(
-        optional(Schema.create(Schema.Type.INT)));
-    GenericData.Array<Integer> genericIntegerArrayWithNulls =
-        new GenericData.Array<Integer>(
-            arrayOfOptionalIntegers,
-            Arrays.asList(1, null, 2, null, 3));
-
-    GenericData.Record record = new GenericRecordBuilder(schema)
+      GenericData.Record record = new GenericRecordBuilder(schema)
         .set("mynull", null)
         .set("myboolean", true)
         .set("myint", 1)
@@ -382,11 +379,13 @@ public class TestReadWrite {
         .set("myfixed", genericFixed)
         .build();
 
-    writer.write(record);
-    writer.close();
+      writer.write(record);
+    }
 
-    AvroParquetReader<GenericRecord> reader = new AvroParquetReader<GenericRecord>(testConf, file);
-    GenericRecord nextRecord = reader.read();
+    final GenericRecord nextRecord;
+    try(AvroParquetReader<GenericRecord> reader = new AvroParquetReader<GenericRecord>(testConf, file)) {
+      nextRecord = reader.read();
+    }
 
     Object expectedEnumSymbol = compat ? "a" :
         new GenericData.EnumSymbol(schema.getField("myenum").schema(), "a");
@@ -416,7 +415,7 @@ public class TestReadWrite {
     Path file = new Path(createTempFile().getPath());
 
     // write file using Parquet APIs
-    ParquetWriter<Map<String, Object>> parquetWriter = new ParquetWriter<Map<String, Object>>(file,
+    try(ParquetWriter<Map<String, Object>> parquetWriter = new ParquetWriter<>(file,
         new WriteSupport<Map<String, Object>>() {
 
       private RecordConsumer recordConsumer;
@@ -560,26 +559,26 @@ public class TestReadWrite {
 
         recordConsumer.endMessage();
       }
-    });
-    Map<String, Object> record = new HashMap<String, Object>();
-    record.put("myboolean", true);
-    record.put("myint", 1);
-    record.put("mylong", 2L);
-    record.put("myfloat", 3.1f);
-    record.put("mydouble", 4.1);
-    record.put("mybytes", ByteBuffer.wrap("hello".getBytes(StandardCharsets.UTF_8)));
-    record.put("mystring", "hello");
-    record.put("myenum", "a");
-    record.put("mynestedint", 1);
-    record.put("myarray", new int[] {1, 2, 3});
-    record.put("myoptionalarray", new int[]{1, 2, 3});
-    record.put("myarrayofoptional", new Integer[] {1, null, 2, null, 3});
-    record.put("myrecordarraya", new int[] {1, 2, 3});
-    record.put("myrecordarrayb", new int[] {4, 5, 6});
-    record.put("mymap", ImmutableMap.of("a", 1, "b", 2));
-    record.put("myfixed", new byte[] { (byte) 65 });
-    parquetWriter.write(record);
-    parquetWriter.close();
+    })) {
+      Map<String, Object> record = new HashMap<String, Object>();
+      record.put("myboolean", true);
+      record.put("myint", 1);
+      record.put("mylong", 2L);
+      record.put("myfloat", 3.1f);
+      record.put("mydouble", 4.1);
+      record.put("mybytes", ByteBuffer.wrap("hello".getBytes(StandardCharsets.UTF_8)));
+      record.put("mystring", "hello");
+      record.put("myenum", "a");
+      record.put("mynestedint", 1);
+      record.put("myarray", new int[]{1, 2, 3});
+      record.put("myoptionalarray", new int[]{1, 2, 3});
+      record.put("myarrayofoptional", new Integer[]{1, null, 2, null, 3});
+      record.put("myrecordarraya", new int[]{1, 2, 3});
+      record.put("myrecordarrayb", new int[]{4, 5, 6});
+      record.put("mymap", ImmutableMap.of("a", 1, "b", 2));
+      record.put("myfixed", new byte[]{(byte) 65});
+      parquetWriter.write(record);
+    }
 
     Schema nestedRecordSchema = Schema.createRecord("mynestedrecord", null, null, false);
     nestedRecordSchema.setFields(Arrays.asList(
@@ -638,16 +637,16 @@ public class TestReadWrite {
     Path file = new Path(createTempFile().getPath());
 
     // Parquet writer
-    ParquetWriter parquetWriter = AvroParquetWriter.builder(file).withSchema(avroSchema)
+    try(ParquetWriter parquetWriter = AvroParquetWriter.builder(file).withSchema(avroSchema)
       .withConf(new Configuration())
-      .build();
+      .build()) {
 
-    GenericRecord record = new GenericRecordBuilder(avroSchema)
-      .set("value", "theValue")
-      .build();
+      GenericRecord record = new GenericRecordBuilder(avroSchema)
+        .set("value", "theValue")
+        .build();
 
-    parquetWriter.write(record);
-    parquetWriter.close();
+      parquetWriter.write(record);
+    }
 
     AvroParquetReader<GenericRecord> reader = new AvroParquetReader<GenericRecord>(testConf, file);
     GenericRecord nextRecord = reader.read();
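
One mechanical consequence visible throughout this file: values consumed
after the resource is closed (the expected and records lists, and
nextRecord) now have to be declared before the try block, because locals
declared inside the block go out of scope at its closing brace. A small
sketch of the idiom (readFirst and its path argument are illustrative):

    import java.io.IOException;
    import org.apache.avro.generic.GenericRecord;
    import org.apache.hadoop.fs.Path;
    import org.apache.parquet.avro.AvroParquetReader;
    import org.apache.parquet.hadoop.ParquetReader;

    public class HoistingSketch {
      // A blank final may be declared outside the try and definitely
      // assigned inside it; it stays usable after the reader is closed.
      static GenericRecord readFirst(Path path) throws IOException {
        final GenericRecord first;
        try (ParquetReader<GenericRecord> reader = AvroParquetReader
            .<GenericRecord>builder(path)
            .build()) {
          first = reader.read();
        }
        return first;
      }
    }
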
diff --git a/parquet-avro/src/test/java/org/apache/parquet/avro/TestReadWriteOldListBehavior.java b/parquet-avro/src/test/java/org/apache/parquet/avro/TestReadWriteOldListBehavior.java
index bcf553e..4542ef2 100644
--- a/parquet-avro/src/test/java/org/apache/parquet/avro/TestReadWriteOldListBehavior.java
+++ b/parquet-avro/src/test/java/org/apache/parquet/avro/TestReadWriteOldListBehavior.java
@@ -85,16 +85,14 @@ public class TestReadWriteOldListBehavior {
     tmp.deleteOnExit();
     tmp.delete();
     Path file = new Path(tmp.getPath());
-
-    AvroParquetWriter<GenericRecord> writer =
-        new AvroParquetWriter<GenericRecord>(file, schema);
-
-    // Write a record with an empty array.
     List<Integer> emptyArray = new ArrayList<Integer>();
-    GenericData.Record record = new GenericRecordBuilder(schema)
+
+    try(AvroParquetWriter<GenericRecord> writer = new AvroParquetWriter<GenericRecord>(file, schema)) {
+      // Write a record with an empty array.
+      GenericData.Record record = new GenericRecordBuilder(schema)
         .set("myarray", emptyArray).build();
-    writer.write(record);
-    writer.close();
+      writer.write(record);
+    }
 
     AvroParquetReader<GenericRecord> reader = new AvroParquetReader<GenericRecord>(testConf, file);
     GenericRecord nextRecord = reader.read();
@@ -112,16 +110,14 @@ public class TestReadWriteOldListBehavior {
     tmp.deleteOnExit();
     tmp.delete();
     Path file = new Path(tmp.getPath());
-
-    AvroParquetWriter<GenericRecord> writer = 
-        new AvroParquetWriter<GenericRecord>(file, schema);
-
-    // Write a record with an empty map.
     ImmutableMap emptyMap = new ImmutableMap.Builder<String, Integer>().build();
-    GenericData.Record record = new GenericRecordBuilder(schema)
+
+    try(AvroParquetWriter<GenericRecord> writer = new AvroParquetWriter<GenericRecord>(file, schema)) {
+      // Write a record with an empty map.
+      GenericData.Record record = new GenericRecordBuilder(schema)
         .set("mymap", emptyMap).build();
-    writer.write(record);
-    writer.close();
+      writer.write(record);
+    }
 
     AvroParquetReader<GenericRecord> reader = new AvroParquetReader<GenericRecord>(testConf, file);
     GenericRecord nextRecord = reader.read();
@@ -139,20 +135,18 @@ public class TestReadWriteOldListBehavior {
     tmp.deleteOnExit();
     tmp.delete();
     Path file = new Path(tmp.getPath());
+    Map<CharSequence, Integer> map = new HashMap<>();
 
-    AvroParquetWriter<GenericRecord> writer =
-        new AvroParquetWriter<GenericRecord>(file, schema);
-
-    // Write a record with a null value
-    Map<CharSequence, Integer> map = new HashMap<CharSequence, Integer>();
-    map.put(str("thirty-four"), 34);
-    map.put(str("eleventy-one"), null);
-    map.put(str("one-hundred"), 100);
+    try(AvroParquetWriter<GenericRecord> writer = new AvroParquetWriter<GenericRecord>(file, schema)) {
+      // Write a record with a null value
+      map.put(str("thirty-four"), 34);
+      map.put(str("eleventy-one"), null);
+      map.put(str("one-hundred"), 100);
 
-    GenericData.Record record = new GenericRecordBuilder(schema)
+      GenericData.Record record = new GenericRecordBuilder(schema)
         .set("mymap", map).build();
-    writer.write(record);
-    writer.close();
+      writer.write(record);
+    }
 
     AvroParquetReader<GenericRecord> reader = new AvroParquetReader<GenericRecord>(testConf, file);
     GenericRecord nextRecord = reader.read();
@@ -172,18 +166,17 @@ public class TestReadWriteOldListBehavior {
     tmp.delete();
     Path file = new Path(tmp.getPath());
 
-    AvroParquetWriter<GenericRecord> writer =
-        new AvroParquetWriter<GenericRecord>(file, schema);
+    try(AvroParquetWriter<GenericRecord> writer = new AvroParquetWriter<GenericRecord>(file, schema)) {
+      // Write a record with a null value
+      Map<String, Integer> map = new HashMap<String, Integer>();
+      map.put("thirty-four", 34);
+      map.put("eleventy-one", null);
+      map.put("one-hundred", 100);
 
-    // Write a record with a null value
-    Map<String, Integer> map = new HashMap<String, Integer>();
-    map.put("thirty-four", 34);
-    map.put("eleventy-one", null);
-    map.put("one-hundred", 100);
-
-    GenericData.Record record = new GenericRecordBuilder(schema)
+      GenericData.Record record = new GenericRecordBuilder(schema)
         .set("mymap", map).build();
-    writer.write(record);
+      writer.write(record);
+    }
   }
 
   @Test
@@ -196,15 +189,13 @@ public class TestReadWriteOldListBehavior {
     tmp.delete();
     Path file = new Path(tmp.getPath());
 
-    AvroParquetWriter<GenericRecord> writer = 
-        new AvroParquetWriter<GenericRecord>(file, schema);
-
-    // Write a record with a map with Utf8 keys.
-    GenericData.Record record = new GenericRecordBuilder(schema)
+    try(AvroParquetWriter<GenericRecord> writer = new AvroParquetWriter<GenericRecord>(file, schema)) {
+      // Write a record with a map with Utf8 keys.
+      GenericData.Record record = new GenericRecordBuilder(schema)
         .set("mymap", ImmutableMap.of(new Utf8("a"), 1, new Utf8("b"), 2))
         .build();
-    writer.write(record);
-    writer.close();
+      writer.write(record);
+    }
 
     AvroParquetReader<GenericRecord> reader = new AvroParquetReader<GenericRecord>(testConf, file);
     GenericRecord nextRecord = reader.read();
@@ -222,9 +213,6 @@ public class TestReadWriteOldListBehavior {
     tmp.deleteOnExit();
     tmp.delete();
     Path file = new Path(tmp.getPath());
-    
-    AvroParquetWriter<GenericRecord> writer = new
-        AvroParquetWriter<GenericRecord>(file, schema);
 
     GenericData.Record nestedRecord = new GenericRecordBuilder(
         schema.getField("mynestedrecord").schema())
@@ -235,12 +223,13 @@ public class TestReadWriteOldListBehavior {
         Schema.createArray(Schema.create(Schema.Type.INT)), integerArray);
 
     GenericFixed genericFixed = new GenericData.Fixed(
-        Schema.createFixed("fixed", null, null, 1), new byte[] { (byte) 65 });
+        Schema.createFixed("fixed", null, null, 1), new byte[]{(byte) 65});
 
     List<Integer> emptyArray = new ArrayList<Integer>();
     ImmutableMap emptyMap = new ImmutableMap.Builder<String, Integer>().build();
 
-    GenericData.Record record = new GenericRecordBuilder(schema)
+    try(AvroParquetWriter<GenericRecord> writer = new AvroParquetWriter<>(file, schema)) {
+      GenericData.Record record = new GenericRecordBuilder(schema)
         .set("mynull", null)
         .set("myboolean", true)
         .set("myint", 1)
@@ -260,8 +249,8 @@ public class TestReadWriteOldListBehavior {
         .set("myfixed", genericFixed)
         .build();
 
-    writer.write(record);
-    writer.close();
+      writer.write(record);
+    }
 
     AvroParquetReader<GenericRecord> reader = new AvroParquetReader<GenericRecord>(testConf, file);
     GenericRecord nextRecord = reader.read();
@@ -362,7 +351,7 @@ public class TestReadWriteOldListBehavior {
     Path file = new Path(tmp.getPath());
 
     // write file using Parquet APIs
-    ParquetWriter<Map<String, Object>> parquetWriter = new ParquetWriter<Map<String, Object>>(file,
+    try(ParquetWriter<Map<String, Object>> parquetWriter = new ParquetWriter<Map<String, Object>>(file,
         new WriteSupport<Map<String, Object>>() {
 
       private RecordConsumer recordConsumer;
@@ -505,26 +494,26 @@ public class TestReadWriteOldListBehavior {
 
         recordConsumer.endMessage();
       }
-    });
-    Map<String, Object> record = new HashMap<String, Object>();
-    record.put("myboolean", true);
-    record.put("myint", 1);
-    record.put("mylong", 2L);
-    record.put("myfloat", 3.1f);
-    record.put("mydouble", 4.1);
-    record.put("mybytes", ByteBuffer.wrap("hello".getBytes(StandardCharsets.UTF_8)));
-    record.put("mystring", "hello");
-    record.put("myenum", "a");
-    record.put("mynestedint", 1);
-    record.put("myarray", new int[] {1, 2, 3});
-    record.put("myoptionalarray", new int[]{1, 2, 3});
-    record.put("myarrayofoptional", new Integer[] {1, null, 2, null, 3});
-    record.put("myrecordarraya", new int[] {1, 2, 3});
-    record.put("myrecordarrayb", new int[] {4, 5, 6});
-    record.put("mymap", ImmutableMap.of("a", 1, "b", 2));
-    record.put("myfixed", new byte[] { (byte) 65 });
-    parquetWriter.write(record);
-    parquetWriter.close();
+    })) {
+      Map<String, Object> record = new HashMap<String, Object>();
+      record.put("myboolean", true);
+      record.put("myint", 1);
+      record.put("mylong", 2L);
+      record.put("myfloat", 3.1f);
+      record.put("mydouble", 4.1);
+      record.put("mybytes", ByteBuffer.wrap("hello".getBytes(StandardCharsets.UTF_8)));
+      record.put("mystring", "hello");
+      record.put("myenum", "a");
+      record.put("mynestedint", 1);
+      record.put("myarray", new int[]{1, 2, 3});
+      record.put("myoptionalarray", new int[]{1, 2, 3});
+      record.put("myarrayofoptional", new Integer[]{1, null, 2, null, 3});
+      record.put("myrecordarraya", new int[]{1, 2, 3});
+      record.put("myrecordarrayb", new int[]{4, 5, 6});
+      record.put("mymap", ImmutableMap.of("a", 1, "b", 2));
+      record.put("myfixed", new byte[]{(byte) 65});
+      parquetWriter.write(record);
+    }
 
     Schema nestedRecordSchema = Schema.createRecord("mynestedrecord", null, null, false);
     nestedRecordSchema.setFields(Arrays.asList(
diff --git a/parquet-avro/src/test/java/org/apache/parquet/avro/TestReflectInputOutputFormat.java b/parquet-avro/src/test/java/org/apache/parquet/avro/TestReflectInputOutputFormat.java
index 74f949e..e3f995b 100644
--- a/parquet-avro/src/test/java/org/apache/parquet/avro/TestReflectInputOutputFormat.java
+++ b/parquet-avro/src/test/java/org/apache/parquet/avro/TestReflectInputOutputFormat.java
@@ -397,29 +397,29 @@ public class TestReflectInputOutputFormat {
 
     final Path mapperOutput = new Path(outputPath.toString(),
         "part-m-00000.parquet");
-    final AvroParquetReader<Car> out = new AvroParquetReader<Car>(conf, mapperOutput);
-    Car car;
-    Car previousCar = null;
-    int lineNumber = 0;
-    while ((car = out.read()) != null) {
-      if (previousCar != null) {
-         // Testing reference equality here. The "model" field should be dictionary-encoded.
-         assertTrue(car.model == previousCar.model);
+    try(final AvroParquetReader<Car> out = new AvroParquetReader<Car>(conf, mapperOutput)) {
+      Car car;
+      Car previousCar = null;
+      int lineNumber = 0;
+      while ((car = out.read()) != null) {
+        if (previousCar != null) {
+          // Testing reference equality here. The "model" field should be dictionary-encoded.
+          assertTrue(car.model == previousCar.model);
+        }
+        // Make sure that predicate push down worked as expected
+        if (car.engine.type == EngineType.PETROL) {
+          fail("UnboundRecordFilter failed to remove cars with PETROL engines");
+        }
+        // Note we use lineNumber * 2 because of predicate push down
+        Car expectedCar = nextRecord(lineNumber * 2);
+        // We removed the optional extra field using projection so we shouldn't
+        // see it here...
+        expectedCar.optionalExtra = null;
+        assertEquals("line " + lineNumber, expectedCar, car);
+        ++lineNumber;
+        previousCar = car;
       }
-      // Make sure that predicate push down worked as expected
-      if (car.engine.type == EngineType.PETROL) {
-        fail("UnboundRecordFilter failed to remove cars with PETROL engines");
-      }
-      // Note we use lineNumber * 2 because of predicate push down
-      Car expectedCar = nextRecord(lineNumber * 2);
-      // We removed the optional extra field using projection so we shouldn't
-      // see it here...
-      expectedCar.optionalExtra = null;
-      assertEquals("line " + lineNumber, expectedCar, car);
-      ++lineNumber;
-      previousCar = car;
     }
-    out.close();
   }
 
   @Test
@@ -458,21 +458,21 @@ public class TestReflectInputOutputFormat {
     waitForJob(job);
 
     final Path mapperOutput = new Path(outputPath.toString(), "part-m-00000.parquet");
-    final AvroParquetReader<ShortCar> out = new AvroParquetReader<ShortCar>(conf, mapperOutput);
-    ShortCar car;
-    int lineNumber = 0;
-    while ((car = out.read()) != null) {
-      // Make sure that predicate push down worked as expected
-      // Note we use lineNumber * 2 because of predicate push down
-      Car expectedCar = nextRecord(lineNumber * 2);
-      // We removed the optional extra field using projection so we shouldn't see it here...
-      assertNull(car.make);
-      assertEquals(car.engine, expectedCar.engine);
-      assertEquals(car.year, expectedCar.year);
-      assertArrayEquals(car.vin, expectedCar.vin);
-      ++lineNumber;
+    try(final AvroParquetReader<ShortCar> out = new AvroParquetReader<ShortCar>(conf, mapperOutput)) {
+      ShortCar car;
+      int lineNumber = 0;
+      while ((car = out.read()) != null) {
+        // Make sure that predicate push down worked as expected
+        // Note we use lineNumber * 2 because of predicate push down
+        Car expectedCar = nextRecord(lineNumber * 2);
+        // We removed the optional extra field using projection so we shouldn't see it here...
+        assertNull(car.make);
+        assertEquals(car.engine, expectedCar.engine);
+        assertEquals(car.year, expectedCar.year);
+        assertArrayEquals(car.vin, expectedCar.vin);
+        ++lineNumber;
+      }
     }
-    out.close();
   }
 
   private void waitForJob(Job job) throws Exception {
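
A side note on the try(final AvroParquetReader<Car> out = ...) form used
here: a resource declared in a try header is implicitly final, so the
explicit keyword is redundant (though legal). Since Java 9, an existing
effectively final variable can also be named directly in the header; a
JDK-only sketch:

    import java.io.StringReader;

    public class FinalResourceSketch {
      public static void main(String[] args) throws Exception {
        StringReader in = new StringReader("hi"); // effectively final
        try (in) { // Java 9+; equivalent to declaring it in the header
          System.out.println((char) in.read());
        } // in.close() has been called here
      }
    }
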
diff --git a/parquet-avro/src/test/java/org/apache/parquet/avro/TestReflectReadWrite.java b/parquet-avro/src/test/java/org/apache/parquet/avro/TestReflectReadWrite.java
index c4bf5bd..2c88d05 100644
--- a/parquet-avro/src/test/java/org/apache/parquet/avro/TestReflectReadWrite.java
+++ b/parquet-avro/src/test/java/org/apache/parquet/avro/TestReflectReadWrite.java
@@ -50,12 +50,13 @@ public class TestReflectReadWrite {
     AvroReadSupport.setAvroDataSupplier(conf, ReflectDataSupplier.class);
 
     Path path = writePojosToParquetFile(10, CompressionCodecName.UNCOMPRESSED, false);
-    ParquetReader<Pojo> reader = new AvroParquetReader<Pojo>(conf, path);
-    Pojo object = getPojo();
-    for (int i = 0; i < 10; i++) {
-      assertEquals(object, reader.read());
+    try(ParquetReader<Pojo> reader = new AvroParquetReader<Pojo>(conf, path)) {
+      Pojo object = getPojo();
+      for (int i = 0; i < 10; i++) {
+        assertEquals(object, reader.read());
+      }
+      assertNull(reader.read());
     }
-    assertNull(reader.read());
   }
 
   @Test
@@ -65,12 +66,13 @@ public class TestReflectReadWrite {
     AvroReadSupport.setAvroDataSupplier(conf, GenericDataSupplier.class);
 
     Path path = writePojosToParquetFile(2, CompressionCodecName.UNCOMPRESSED, false);
-    ParquetReader<GenericRecord> reader = new AvroParquetReader<GenericRecord>(conf, path);
-    GenericRecord object = getGenericPojoUtf8();
-    for (int i = 0; i < 2; i += 1) {
-      assertEquals(object, reader.read());
+    try(ParquetReader<GenericRecord> reader = new AvroParquetReader<GenericRecord>(conf, path)) {
+      GenericRecord object = getGenericPojoUtf8();
+      for (int i = 0; i < 2; i += 1) {
+        assertEquals(object, reader.read());
+      }
+      assertNull(reader.read());
     }
-    assertNull(reader.read());
   }
 
   private GenericRecord getGenericPojoUtf8() {
@@ -135,16 +137,16 @@ public class TestReflectReadWrite {
     Pojo object = getPojo();
 
     Schema schema = ReflectData.get().getSchema(object.getClass());
-    ParquetWriter<Pojo> writer = AvroParquetWriter.<Pojo>builder(path)
+    try(ParquetWriter<Pojo> writer = AvroParquetWriter.<Pojo>builder(path)
         .withSchema(schema)
         .withCompressionCodec(compression)
         .withDataModel(ReflectData.get())
         .withDictionaryEncoding(enableDictionary)
-        .build();
-    for (int i = 0; i < num; i++) {
-      writer.write(object);
+        .build()) {
+      for (int i = 0; i < num; i++) {
+        writer.write(object);
+      }
     }
-    writer.close();
     return path;
   }
 
diff --git a/parquet-avro/src/test/java/org/apache/parquet/avro/TestSpecificInputOutputFormat.java b/parquet-avro/src/test/java/org/apache/parquet/avro/TestSpecificInputOutputFormat.java
index 8e89363..c2ad3f2 100644
--- a/parquet-avro/src/test/java/org/apache/parquet/avro/TestSpecificInputOutputFormat.java
+++ b/parquet-avro/src/test/java/org/apache/parquet/avro/TestSpecificInputOutputFormat.java
@@ -189,29 +189,29 @@ public class TestSpecificInputOutputFormat {
 
     final Path mapperOutput = new Path(outputPath.toString(),
         "part-m-00000.parquet");
-    final AvroParquetReader<Car> out = new AvroParquetReader<Car>(mapperOutput);
-    Car car;
-    Car previousCar = null;
-    int lineNumber = 0;
-    while ((car = out.read()) != null) {
-      if (previousCar != null) {
-         // Testing reference equality here. The "model" field should be dictionary-encoded.
-         assertTrue(car.getModel() == previousCar.getModel());
+    try(final AvroParquetReader<Car> out = new AvroParquetReader<>(mapperOutput)) {
+      Car car;
+      Car previousCar = null;
+      int lineNumber = 0;
+      while ((car = out.read()) != null) {
+        if (previousCar != null) {
+          // Testing reference equality here. The "model" field should be dictionary-encoded.
+          assertTrue(car.getModel() == previousCar.getModel());
+        }
+        // Make sure that predicate push down worked as expected
+        if (car.getEngine().getType() == EngineType.PETROL) {
+          fail("UnboundRecordFilter failed to remove cars with PETROL engines");
+        }
+        // Note we use lineNumber * 2 because of predicate push down
+        Car expectedCar = nextRecord(lineNumber * 2);
+        // We removed the optional extra field using projection so we shouldn't
+        // see it here...
+        expectedCar.setOptionalExtra(null);
+        assertEquals("line " + lineNumber, expectedCar, car);
+        ++lineNumber;
+        previousCar = car;
       }
-      // Make sure that predicate push down worked as expected
-      if (car.getEngine().getType() == EngineType.PETROL) {
-        fail("UnboundRecordFilter failed to remove cars with PETROL engines");
-      }
-      // Note we use lineNumber * 2 because of predicate push down
-      Car expectedCar = nextRecord(lineNumber * 2);
-      // We removed the optional extra field using projection so we shouldn't
-      // see it here...
-      expectedCar.setOptionalExtra(null);
-      assertEquals("line " + lineNumber, expectedCar, car);
-      ++lineNumber;
-      previousCar = car;
     }
-    out.close();
   }
 
   @Test
@@ -249,21 +249,21 @@ public class TestSpecificInputOutputFormat {
     waitForJob(job);
 
     final Path mapperOutput = new Path(outputPath.toString(), "part-m-00000.parquet");
-    final AvroParquetReader<ShortCar> out = new AvroParquetReader<ShortCar>(mapperOutput);
-    ShortCar car;
-    int lineNumber = 0;
-    while ((car = out.read()) != null) {
-      // Make sure that predicate push down worked as expected
-      // Note we use lineNumber * 2 because of predicate push down
-      Car expectedCar = nextRecord(lineNumber * 2);
-      // We removed the optional extra field using projection so we shouldn't see it here...
-      assertNull(car.getMake());
-      assertEquals(car.getEngine(), expectedCar.getEngine());
-      assertEquals(car.getYear(), expectedCar.getYear());
-      assertEquals(car.getVin(), expectedCar.getVin());
-      ++lineNumber;
+    try(final AvroParquetReader<ShortCar> out = new AvroParquetReader<>(mapperOutput)) {
+      ShortCar car;
+      int lineNumber = 0;
+      while ((car = out.read()) != null) {
+        // Make sure that predicate push down worked as expected
+        // Note we use lineNumber * 2 because of predicate push down
+        Car expectedCar = nextRecord(lineNumber * 2);
+        // We removed the optional extra field using projection so we shouldn't see it here...
+        assertNull(car.getMake());
+        assertEquals(car.getEngine(), expectedCar.getEngine());
+        assertEquals(car.getYear(), expectedCar.getYear());
+        assertEquals(car.getVin(), expectedCar.getVin());
+        ++lineNumber;
+      }
     }
-    out.close();
   }
 
   private void waitForJob(Job job) throws Exception {
diff --git a/parquet-avro/src/test/java/org/apache/parquet/avro/TestSpecificReadWrite.java b/parquet-avro/src/test/java/org/apache/parquet/avro/TestSpecificReadWrite.java
index c0199fa..fa37d54 100644
--- a/parquet-avro/src/test/java/org/apache/parquet/avro/TestSpecificReadWrite.java
+++ b/parquet-avro/src/test/java/org/apache/parquet/avro/TestSpecificReadWrite.java
@@ -66,54 +66,59 @@ public class TestSpecificReadWrite {
   @Test
   public void testCompatReadWriteSpecific() throws IOException {
     Path path = writeCarsToParquetFile(10, CompressionCodecName.UNCOMPRESSED, false);
-    ParquetReader<Car> reader = new AvroParquetReader<Car>(testConf, path);
-    for (int i = 0; i < 10; i++) {
-      assertEquals(getVwPolo().toString(), reader.read().toString());
-      assertEquals(getVwPassat().toString(), reader.read().toString());
-      assertEquals(getBmwMini().toString(), reader.read().toString());
+    try(ParquetReader<Car> reader = new AvroParquetReader<>(testConf, path)) {
+      for (int i = 0; i < 10; i++) {
+        assertEquals(getVwPolo().toString(), reader.read().toString());
+        assertEquals(getVwPassat().toString(), reader.read().toString());
+        assertEquals(getBmwMini().toString(), reader.read().toString());
+      }
+      assertNull(reader.read());
     }
-    assertNull(reader.read());
   }
 
   @Test
   public void testReadWriteSpecificWithDictionary() throws IOException {
     Path path = writeCarsToParquetFile(10, CompressionCodecName.UNCOMPRESSED, true);
-    ParquetReader<Car> reader = new AvroParquetReader<Car>(testConf, path);
-    for (int i = 0; i < 10; i++) {
-      assertEquals(getVwPolo().toString(), reader.read().toString());
-      assertEquals(getVwPassat().toString(), reader.read().toString());
-      assertEquals(getBmwMini().toString(), reader.read().toString());
+    try(ParquetReader<Car> reader = new AvroParquetReader<>(testConf, path)) {
+      for (int i = 0; i < 10; i++) {
+        assertEquals(getVwPolo().toString(), reader.read().toString());
+        assertEquals(getVwPassat().toString(), reader.read().toString());
+        assertEquals(getBmwMini().toString(), reader.read().toString());
+      }
+      assertNull(reader.read());
     }
-    assertNull(reader.read());
   }
 
   @Test
   public void testFilterMatchesMultiple() throws IOException {
     Path path = writeCarsToParquetFile(10, CompressionCodecName.UNCOMPRESSED, false);
-    ParquetReader<Car> reader = new AvroParquetReader<Car>(testConf, path, column("make", equalTo("Volkswagen")));
-    for (int i = 0; i < 10; i++) {
-      assertEquals(getVwPolo().toString(), reader.read().toString());
-      assertEquals(getVwPassat().toString(), reader.read().toString());
+    try(ParquetReader<Car> reader = new AvroParquetReader<>(testConf, path, column("make", equalTo("Volkswagen")))) {
+      for (int i = 0; i < 10; i++) {
+        assertEquals(getVwPolo().toString(), reader.read().toString());
+        assertEquals(getVwPassat().toString(), reader.read().toString());
+      }
+      assertNull(reader.read());
     }
-    assertNull(reader.read());
   }
 
   @Test
   public void testFilterMatchesMultipleBlocks() throws IOException {
     Path path = writeCarsToParquetFile(10000, CompressionCodecName.UNCOMPRESSED, false, DEFAULT_BLOCK_SIZE/64, DEFAULT_PAGE_SIZE/64);
-    ParquetReader<Car> reader = new AvroParquetReader<Car>(testConf, path, column("make", equalTo("Volkswagen")));
-    for (int i = 0; i < 10000; i++) {
-      assertEquals(getVwPolo().toString(), reader.read().toString());
-      assertEquals(getVwPassat().toString(), reader.read().toString());
+    try(ParquetReader<Car> reader = new AvroParquetReader<>(testConf, path, column("make", equalTo("Volkswagen")))) {
+      for (int i = 0; i < 10000; i++) {
+        assertEquals(getVwPolo().toString(), reader.read().toString());
+        assertEquals(getVwPassat().toString(), reader.read().toString());
+      }
+      assertNull(reader.read());
     }
-    assertNull(reader.read());
   }
 
   @Test
   public void testFilterMatchesNoBlocks() throws IOException {
     Path path = writeCarsToParquetFile(10000, CompressionCodecName.UNCOMPRESSED, false, DEFAULT_BLOCK_SIZE/64, DEFAULT_PAGE_SIZE/64);
-    ParquetReader<Car> reader = new AvroParquetReader<Car>(testConf, path, column("make", equalTo("Bogus")));
-    assertNull(reader.read());
+    try(ParquetReader<Car> reader = new AvroParquetReader<>(testConf, path, column("make", equalTo("Bogus")))) {
+      assertNull(reader.read());
+    }
   }
 
   @Test
@@ -127,36 +132,38 @@ public class TestSpecificReadWrite {
     Car vwPassat = getVwPassat();
     Car bmwMini  = getBmwMini();
 
-    ParquetWriter<Car> writer = new AvroParquetWriter<Car>(path, Car.SCHEMA$,
+    try(ParquetWriter<Car> writer = new AvroParquetWriter<Car>(path, Car.SCHEMA$,
         CompressionCodecName.UNCOMPRESSED, DEFAULT_BLOCK_SIZE/128, DEFAULT_PAGE_SIZE/128,
-        false);
-    for (int i = 0; i < 10000; i++) {
-      writer.write(vwPolo);
-      writer.write(vwPassat);
-      writer.write(vwPolo);
+        false)) {
+      for (int i = 0; i < 10000; i++) {
+        writer.write(vwPolo);
+        writer.write(vwPassat);
+        writer.write(vwPolo);
+      }
+      writer.write(bmwMini); // only write BMW in last block
     }
-    writer.write(bmwMini); // only write BMW in last block
-    writer.close();
 
-    ParquetReader<Car> reader = new AvroParquetReader<Car>(testConf, path, column("make",
-        equalTo("BMW")));
-    assertEquals(getBmwMini().toString(), reader.read().toString());
-    assertNull(reader.read());
+    try(ParquetReader<Car> reader = new AvroParquetReader<Car>(testConf, path, column("make",
+        equalTo("BMW")))) {
+      assertEquals(getBmwMini().toString(), reader.read().toString());
+      assertNull(reader.read());
+    }
   }
 
   @Test
   public void testFilterWithDictionary() throws IOException {
     Path path = writeCarsToParquetFile(1,CompressionCodecName.UNCOMPRESSED,true);
-    ParquetReader<Car> reader = new AvroParquetReader<Car>(testConf, path, column("make", equalTo("Volkswagen")));
-    assertEquals(getVwPolo().toString(), reader.read().toString());
-    assertEquals(getVwPassat().toString(), reader.read().toString());
-    assertNull(reader.read());
+    try(ParquetReader<Car> reader = new AvroParquetReader<>(testConf, path, column("make", equalTo("Volkswagen")))) {
+      assertEquals(getVwPolo().toString(), reader.read().toString());
+      assertEquals(getVwPassat().toString(), reader.read().toString());
+      assertNull(reader.read());
+    }
   }
 
   @Test
   public void testFilterOnSubAttribute() throws IOException {
     Path path = writeCarsToParquetFile(1, CompressionCodecName.UNCOMPRESSED, false);
-    
+
     ParquetReader<Car> reader = new AvroParquetReader<Car>(testConf, path, column("engine.type", equalTo(EngineType.DIESEL)));
     assertEquals(reader.read().toString(), getVwPassat().toString());
     assertNull(reader.read());
@@ -196,16 +203,17 @@ public class TestSpecificReadWrite {
     projectedSchema.setFields(projectedFields);
     AvroReadSupport.setRequestedProjection(conf, projectedSchema);
 
-    ParquetReader<Car> reader = new AvroParquetReader<Car>(conf, path);
-    for (Car car = reader.read(); car != null; car = reader.read()) {
-      assertEquals(car.getDoors() != null, true);
-      assertEquals(car.getEngine() != null, true);
-      assertEquals(car.getMake() != null, true);
-      assertEquals(car.getModel() != null, true);
-      assertEquals(car.getYear() != null, true);
-      assertEquals(car.getVin() != null, true);
-      assertNull(car.getOptionalExtra());
-      assertNull(car.getServiceHistory());
+    try(ParquetReader<Car> reader = new AvroParquetReader<Car>(conf, path)) {
+      for (Car car = reader.read(); car != null; car = reader.read()) {
+        assertEquals(car.getDoors() != null, true);
+        assertEquals(car.getEngine() != null, true);
+        assertEquals(car.getMake() != null, true);
+        assertEquals(car.getModel() != null, true);
+        assertEquals(car.getYear() != null, true);
+        assertEquals(car.getVin() != null, true);
+        assertNull(car.getOptionalExtra());
+        assertNull(car.getServiceHistory());
+      }
     }
   }
 
@@ -215,14 +223,15 @@ public class TestSpecificReadWrite {
     Configuration conf = new Configuration(testConf);
     AvroReadSupport.setAvroReadSchema(conf, NewCar.SCHEMA$);
 
-    ParquetReader<NewCar> reader = new AvroParquetReader<NewCar>(conf, path);
-    for (NewCar car = reader.read(); car != null; car = reader.read()) {
-      assertEquals(car.getEngine() != null, true);
-      assertEquals(car.getBrand() != null, true);
-      assertEquals(car.getYear() != null, true);
-      assertEquals(car.getVin() != null, true);
-      assertEquals(car.getDescription() == null, true);
-      assertEquals(car.getOpt() == 5, true);
+    try(ParquetReader<NewCar> reader = new AvroParquetReader<>(conf, path)) {
+      for (NewCar car = reader.read(); car != null; car = reader.read()) {
+        assertEquals(car.getEngine() != null, true);
+        assertEquals(car.getBrand() != null, true);
+        assertEquals(car.getYear() != null, true);
+        assertEquals(car.getVin() != null, true);
+        assertEquals(car.getDescription() == null, true);
+        assertEquals(car.getOpt() == 5, true);
+      }
     }
   }
 
@@ -240,14 +249,14 @@ public class TestSpecificReadWrite {
     Car vwPassat = getVwPassat();
     Car bmwMini  = getBmwMini();
 
-    ParquetWriter<Car> writer = new AvroParquetWriter<Car>(path,Car.SCHEMA$, compression,
-        blockSize, pageSize, enableDictionary);
-    for (int i = 0; i < num; i++) {
-      writer.write(vwPolo);
-      writer.write(vwPassat);
-      writer.write(bmwMini);
+    try(ParquetWriter<Car> writer = new AvroParquetWriter<>(path, Car.SCHEMA$, compression,
+      blockSize, pageSize, enableDictionary)) {
+      for (int i = 0; i < num; i++) {
+        writer.write(vwPolo);
+        writer.write(vwPassat);
+        writer.write(bmwMini);
+      }
     }
-    writer.close();
     return path;
   }
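
writeCarsToParquetFile above manages a single writer, but the same try
header can hold several resources, separated by semicolons; they are
closed in reverse declaration order. A JDK-only sketch:

    import java.io.BufferedReader;
    import java.io.BufferedWriter;
    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class MultiResourceSketch {
      // On exit, writer is closed first, then reader (reverse order).
      static void copy(Path in, Path out) throws IOException {
        try (BufferedReader reader = Files.newBufferedReader(in);
             BufferedWriter writer = Files.newBufferedWriter(out)) {
          String line;
          while ((line = reader.readLine()) != null) {
            writer.write(line);
            writer.newLine();
          }
        }
      }
    }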
 
diff --git a/parquet-avro/src/test/java/org/apache/parquet/avro/TestStringBehavior.java b/parquet-avro/src/test/java/org/apache/parquet/avro/TestStringBehavior.java
index c0cad99..16c465f 100644
--- a/parquet-avro/src/test/java/org/apache/parquet/avro/TestStringBehavior.java
+++ b/parquet-avro/src/test/java/org/apache/parquet/avro/TestStringBehavior.java
@@ -83,40 +83,29 @@ public class TestStringBehavior {
     file.deleteOnExit();
 
     parquetFile = new Path(file.getPath());
-    ParquetWriter<GenericRecord> parquet = AvroParquetWriter
+    try(ParquetWriter<GenericRecord> parquet = AvroParquetWriter
         .<GenericRecord>builder(parquetFile)
         .withDataModel(GenericData.get())
         .withSchema(SCHEMA)
-        .build();
-
-    try {
+        .build()) {
       parquet.write(record);
-    } finally {
-      parquet.close();
     }
 
     avroFile = temp.newFile("avro");
     avroFile.delete();
     avroFile.deleteOnExit();
-    DataFileWriter<GenericRecord> avro = new DataFileWriter<GenericRecord>(
-        new GenericDatumWriter<GenericRecord>(SCHEMA)).create(SCHEMA, avroFile);
-
-    try {
+    try(DataFileWriter<GenericRecord> avro = new DataFileWriter<GenericRecord>(
+      new GenericDatumWriter<>(SCHEMA)).create(SCHEMA, avroFile)) {
       avro.append(record);
-    } finally {
-      avro.close();
     }
   }
 
   @Test
   public void testGeneric() throws IOException {
     GenericRecord avroRecord;
-    DataFileReader<GenericRecord> avro = new DataFileReader<GenericRecord>(
-        avroFile, new GenericDatumReader<GenericRecord>(SCHEMA));
-    try {
+    try(DataFileReader<GenericRecord> avro = new DataFileReader<>(
+      avroFile, new GenericDatumReader<>(SCHEMA))) {
       avroRecord = avro.next();
-    } finally {
-      avro.close();
     }
 
     GenericRecord parquetRecord;
@@ -124,14 +113,11 @@ public class TestStringBehavior {
     conf.setBoolean(AvroReadSupport.AVRO_COMPATIBILITY, false);
     AvroReadSupport.setAvroDataSupplier(conf, GenericDataSupplier.class);
     AvroReadSupport.setAvroReadSchema(conf, SCHEMA);
-    ParquetReader<GenericRecord> parquet = AvroParquetReader
+    try(ParquetReader<GenericRecord> parquet = AvroParquetReader
         .<GenericRecord>builder(parquetFile)
         .withConf(conf)
-        .build();
-    try {
+        .build()) {
       parquetRecord = parquet.read();
-    } finally {
-      parquet.close();
     }
 
     Assert.assertEquals("Avro default string class should be Utf8",
@@ -169,14 +155,10 @@ public class TestStringBehavior {
   @Test
   public void testSpecific() throws IOException {
     org.apache.parquet.avro.StringBehaviorTest avroRecord;
-    DataFileReader<org.apache.parquet.avro.StringBehaviorTest> avro =
-        new DataFileReader<org.apache.parquet.avro.StringBehaviorTest>(avroFile,
-            new SpecificDatumReader<org.apache.parquet.avro.StringBehaviorTest>(
-                org.apache.parquet.avro.StringBehaviorTest.getClassSchema()));
-    try {
+    try(DataFileReader<org.apache.parquet.avro.StringBehaviorTest> avro =
+          new DataFileReader<>(avroFile, new SpecificDatumReader<>(
+            org.apache.parquet.avro.StringBehaviorTest.getClassSchema()))) {
       avroRecord = avro.next();
-    } finally {
-      avro.close();
     }
 
     org.apache.parquet.avro.StringBehaviorTest parquetRecord;
@@ -185,15 +167,12 @@ public class TestStringBehavior {
     AvroReadSupport.setAvroDataSupplier(conf, SpecificDataSupplier.class);
     AvroReadSupport.setAvroReadSchema(conf,
         org.apache.parquet.avro.StringBehaviorTest.getClassSchema());
-    ParquetReader<org.apache.parquet.avro.StringBehaviorTest> parquet =
+    try(ParquetReader<org.apache.parquet.avro.StringBehaviorTest> parquet =
         AvroParquetReader
             .<org.apache.parquet.avro.StringBehaviorTest>builder(parquetFile)
             .withConf(conf)
-            .build();
-    try {
+            .build()) {
       parquetRecord = parquet.read();
-    } finally {
-      parquet.close();
     }
 
     Assert.assertEquals("Avro default string class should be String",
@@ -235,12 +214,9 @@ public class TestStringBehavior {
         .getSchema(ReflectRecord.class);
 
     ReflectRecord avroRecord;
-    DataFileReader<ReflectRecord> avro = new DataFileReader<ReflectRecord>(
-        avroFile, new ReflectDatumReader<ReflectRecord>(reflectSchema));
-    try {
+    try(DataFileReader<ReflectRecord> avro = new DataFileReader<>(
+      avroFile, new ReflectDatumReader<>(reflectSchema))) {
       avroRecord = avro.next();
-    } finally {
-      avro.close();
     }
 
     ReflectRecord parquetRecord;
@@ -248,14 +224,11 @@ public class TestStringBehavior {
     conf.setBoolean(AvroReadSupport.AVRO_COMPATIBILITY, false);
     AvroReadSupport.setAvroDataSupplier(conf, ReflectDataSupplier.class);
     AvroReadSupport.setAvroReadSchema(conf, reflectSchema);
-    ParquetReader<ReflectRecord> parquet = AvroParquetReader
+    try(ParquetReader<ReflectRecord> parquet = AvroParquetReader
         .<ReflectRecord>builder(parquetFile)
         .withConf(conf)
-        .build();
-    try {
+        .build()) {
       parquetRecord = parquet.read();
-    } finally {
-      parquet.close();
     }
 
     Assert.assertEquals("Avro default string class should be String",
@@ -297,13 +270,10 @@ public class TestStringBehavior {
         .getSchema(ReflectRecordJavaClass.class);
     System.err.println("Schema: " + reflectSchema.toString(true));
     ReflectRecordJavaClass avroRecord;
-    DataFileReader<ReflectRecordJavaClass> avro =
-        new DataFileReader<ReflectRecordJavaClass>(avroFile,
-            new ReflectDatumReader<ReflectRecordJavaClass>(reflectSchema));
-    try {
+    try(DataFileReader<ReflectRecordJavaClass> avro =
+          new DataFileReader<>(avroFile,
+            new ReflectDatumReader<>(reflectSchema))) {
       avroRecord = avro.next();
-    } finally {
-      avro.close();
     }
 
     ReflectRecordJavaClass parquetRecord;
@@ -312,14 +282,11 @@ public class TestStringBehavior {
     AvroReadSupport.setAvroDataSupplier(conf, ReflectDataSupplier.class);
     AvroReadSupport.setAvroReadSchema(conf, reflectSchema);
     AvroReadSupport.setRequestedProjection(conf, reflectSchema);
-    ParquetReader<ReflectRecordJavaClass> parquet = AvroParquetReader
+    try(ParquetReader<ReflectRecordJavaClass> parquet = AvroParquetReader
         .<ReflectRecordJavaClass>builder(parquetFile)
         .withConf(conf)
-        .build();
-    try {
+        .build()) {
       parquetRecord = parquet.read();
-    } finally {
-      parquet.close();
     }
 
     // Avro uses String even if CharSequence is set
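
TestStringBehavior exercises the Avro side as well: DataFileWriter and
DataFileReader are Closeable too, so their try/finally blocks collapse
the same way. A minimal sketch of the resulting shape (the schema and
file location are illustrative):

    import java.io.File;
    import org.apache.avro.Schema;
    import org.apache.avro.SchemaBuilder;
    import org.apache.avro.file.DataFileReader;
    import org.apache.avro.file.DataFileWriter;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericDatumReader;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.generic.GenericRecord;

    public class AvroFileSketch {
      public static void main(String[] args) throws Exception {
        Schema schema = SchemaBuilder.record("S").fields()
            .requiredString("text").endRecord();
        File file = new File("/tmp/sketch.avro"); // illustrative path

        GenericRecord record = new GenericData.Record(schema);
        record.put("text", "hello");

        // The writer is closed automatically after append().
        try (DataFileWriter<GenericRecord> writer = new DataFileWriter<>(
            new GenericDatumWriter<GenericRecord>(schema)).create(schema, file)) {
          writer.append(record);
        }

        // The reader is closed automatically after next().
        try (DataFileReader<GenericRecord> reader = new DataFileReader<>(
            file, new GenericDatumReader<GenericRecord>(schema))) {
          System.out.println(reader.next());
        }
      }
    }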