You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@orc.apache.org by do...@apache.org on 2021/08/04 18:24:39 UTC

[orc] branch branch-1.7 updated (c1b1ad7 -> 950d2f1)

This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a change to branch branch-1.7
in repository https://gitbox.apache.org/repos/asf/orc.git.


    from c1b1ad7  ORC-909: Remove commons-io v2.1 dependency (#815)
     new 33fdd09  ORC-903: Migrate TestVectorOrcFile to JUnit5 (#809)
     new 950d2f1  ORC-907: Remove junit-vintage-engine from core module (#813)

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 java/core/pom.xml                                  |  15 +-
 .../org/apache/orc/StringDictTestingUtils.java     |   6 +-
 .../test/org/apache/orc/TestColumnStatistics.java  |  37 +-
 .../src/test/org/apache/orc/TestCorruptTypes.java  |   6 +-
 .../test/org/apache/orc/TestInMemoryKeystore.java  |   7 +-
 .../org/apache/orc/TestNewIntegerEncoding.java     | 190 ++---
 .../test/org/apache/orc/TestOrcDSTNoTimezone.java  |  40 +-
 .../test/org/apache/orc/TestOrcFilterContext.java  |  51 +-
 .../src/test/org/apache/orc/TestOrcNoTimezone.java |  50 +-
 .../org/apache/orc/TestOrcNullOptimization.java    |  20 +-
 .../test/org/apache/orc/TestOrcTimestampPPD.java   |  20 +-
 .../src/test/org/apache/orc/TestOrcTimezone1.java  |  77 +--
 .../src/test/org/apache/orc/TestOrcTimezone2.java  |  50 +-
 .../src/test/org/apache/orc/TestOrcTimezone3.java  |  47 +-
 .../src/test/org/apache/orc/TestOrcTimezone4.java  |  19 +-
 .../test/org/apache/orc/TestOrcTimezonePPD.java    |  88 +--
 .../org/apache/orc/TestProlepticConversions.java   | 115 ++--
 java/core/src/test/org/apache/orc/TestReader.java  |  48 +-
 .../apache/orc/TestRowFilteringComplexTypes.java   |  28 +-
 .../orc/TestRowFilteringComplexTypesNulls.java     |  17 +-
 .../org/apache/orc/TestRowFilteringIOSkip.java     |  25 +-
 .../org/apache/orc/TestRowFilteringNoSkip.java     |  17 +-
 .../test/org/apache/orc/TestRowFilteringSkip.java  |  17 +-
 .../test/org/apache/orc/TestStringDictionary.java  | 131 ++--
 .../test/org/apache/orc/TestTypeDescription.java   |  79 +--
 java/core/src/test/org/apache/orc/TestUnicode.java |  68 +-
 .../test/org/apache/orc/TestUnrolledBitPack.java   |  56 +-
 .../src/test/org/apache/orc/TestVectorOrcFile.java | 764 +++++++++++----------
 .../org/apache/orc/impl/TestBitFieldReader.java    |   4 +-
 .../src/test/org/apache/orc/impl/TestBitPack.java  |  18 +-
 .../apache/orc/impl/TestColumnStatisticsImpl.java  |   6 +-
 .../orc/impl/TestConvertTreeReaderFactory.java     |  20 +-
 .../test/org/apache/orc/impl/TestCryptoUtils.java  |   5 +-
 .../apache/orc/impl/TestDataReaderProperties.java  |  40 +-
 .../test/org/apache/orc/impl/TestDateUtils.java    |   9 +-
 .../test/org/apache/orc/impl/TestDynamicArray.java |   4 +-
 .../src/test/org/apache/orc/impl/TestInStream.java |  41 +-
 .../orc/impl/TestIntegerCompressionReader.java     |   4 +-
 .../org/apache/orc/impl/TestMemoryManager.java     |  36 +-
 .../apache/orc/impl/TestOrcFilterContextImpl.java  |  31 +-
 .../org/apache/orc/impl/TestOrcLargeStripe.java    |  28 +-
 .../test/org/apache/orc/impl/TestOrcWideTable.java |   5 +-
 .../test/org/apache/orc/impl/TestOutStream.java    |  15 +-
 .../org/apache/orc/impl/TestPhysicalFsWriter.java  |   4 +-
 .../orc/impl/TestPredicatePushDownBounds.java      |   4 +-
 .../test/org/apache/orc/impl/TestReaderImpl.java   |  23 +-
 .../org/apache/orc/impl/TestRecordReaderImpl.java  |   7 +-
 .../apache/orc/impl/TestRunLengthByteReader.java   |   4 +-
 .../orc/impl/TestRunLengthIntegerReader.java       |   4 +-
 .../org/apache/orc/impl/TestSchemaEvolution.java   | 429 ++++++------
 .../apache/orc/impl/TestSerializationUtils.java    |  17 +-
 .../test/org/apache/orc/impl/TestStreamName.java   |   6 +-
 .../orc/impl/TestStringHashTableDictionary.java    |   5 +-
 .../apache/orc/impl/TestStringRedBlackTree.java    |   4 +-
 .../test/org/apache/orc/impl/TestWriterImpl.java   |  20 +-
 .../src/test/org/apache/orc/impl/TestZlib.java     |   6 +-
 .../src/test/org/apache/orc/impl/TestZstd.java     |   4 +-
 .../org/apache/orc/impl/mask/TestDataMask.java     |  28 +-
 .../org/apache/orc/impl/mask/TestRedactMask.java   |  20 +-
 .../org/apache/orc/impl/mask/TestSHA256Mask.java   |   6 +-
 .../org/apache/orc/impl/mask/TestUnmaskRange.java  |   4 +-
 .../impl/reader/TestReaderEncryptionVariant.java   |   4 +-
 .../test/org/apache/orc/util/TestBloomFilter.java  |   6 +-
 .../src/test/org/apache/orc/util/TestMurmur3.java  |   4 +-
 .../src/test/org/apache/orc/util/TestOrcUtils.java |   4 +-
 .../orc/util/TestStreamWrapperFileSystem.java      |  12 +-
 java/pom.xml                                       |  16 +-
 java/shims/pom.xml                                 |   2 +-
 68 files changed, 1402 insertions(+), 1595 deletions(-)

[orc] 02/02: ORC-907: Remove junit-vintage-engine from core module (#813)

Posted by do...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-1.7
in repository https://gitbox.apache.org/repos/asf/orc.git

commit 950d2f17405b629cc719e17e96a350f8c949e4de
Author: Dongjoon Hyun <do...@apache.org>
AuthorDate: Wed Aug 4 11:18:45 2021 -0700

    ORC-907: Remove junit-vintage-engine from core module (#813)
    
    This PR removes `junit-vintage-engine`.
    
    After this PR, Apache ORC JUnit5 migration is finished.
    
    Pass the CIs.
    
    (cherry picked from commit 2a52901c522833854d9d5cf25177ead8133b3d08)
    Signed-off-by: Dongjoon Hyun <do...@apache.org>
---
 java/core/pom.xml                                  |  16 +-
 .../org/apache/orc/StringDictTestingUtils.java     |   6 +-
 .../test/org/apache/orc/TestColumnStatistics.java  |  37 +-
 .../src/test/org/apache/orc/TestCorruptTypes.java  |   6 +-
 .../test/org/apache/orc/TestInMemoryKeystore.java  |   7 +-
 .../org/apache/orc/TestNewIntegerEncoding.java     | 190 ++++-----
 .../test/org/apache/orc/TestOrcDSTNoTimezone.java  |  40 +-
 .../test/org/apache/orc/TestOrcFilterContext.java  |  51 ++-
 .../src/test/org/apache/orc/TestOrcNoTimezone.java |  50 +--
 .../org/apache/orc/TestOrcNullOptimization.java    |  20 +-
 .../test/org/apache/orc/TestOrcTimestampPPD.java   |  20 +-
 .../src/test/org/apache/orc/TestOrcTimezone1.java  |  77 ++--
 .../src/test/org/apache/orc/TestOrcTimezone2.java  |  50 +--
 .../src/test/org/apache/orc/TestOrcTimezone3.java  |  47 +--
 .../src/test/org/apache/orc/TestOrcTimezone4.java  |  19 +-
 .../test/org/apache/orc/TestOrcTimezonePPD.java    |  88 ++---
 .../org/apache/orc/TestProlepticConversions.java   | 115 +++---
 java/core/src/test/org/apache/orc/TestReader.java  |  48 ++-
 .../apache/orc/TestRowFilteringComplexTypes.java   |  28 +-
 .../orc/TestRowFilteringComplexTypesNulls.java     |  17 +-
 .../org/apache/orc/TestRowFilteringIOSkip.java     |  25 +-
 .../org/apache/orc/TestRowFilteringNoSkip.java     |  17 +-
 .../test/org/apache/orc/TestRowFilteringSkip.java  |  17 +-
 .../test/org/apache/orc/TestStringDictionary.java  | 131 +++----
 .../test/org/apache/orc/TestTypeDescription.java   |  79 ++--
 java/core/src/test/org/apache/orc/TestUnicode.java |  68 ++--
 .../test/org/apache/orc/TestUnrolledBitPack.java   |  56 ++-
 .../src/test/org/apache/orc/TestVectorOrcFile.java |   5 +-
 .../org/apache/orc/impl/TestBitFieldReader.java    |   4 +-
 .../src/test/org/apache/orc/impl/TestBitPack.java  |  18 +-
 .../apache/orc/impl/TestColumnStatisticsImpl.java  |   6 +-
 .../orc/impl/TestConvertTreeReaderFactory.java     |  20 +-
 .../test/org/apache/orc/impl/TestCryptoUtils.java  |   5 +-
 .../apache/orc/impl/TestDataReaderProperties.java  |  40 +-
 .../test/org/apache/orc/impl/TestDateUtils.java    |   9 +-
 .../test/org/apache/orc/impl/TestDynamicArray.java |   4 +-
 .../src/test/org/apache/orc/impl/TestInStream.java |  41 +-
 .../orc/impl/TestIntegerCompressionReader.java     |   4 +-
 .../org/apache/orc/impl/TestMemoryManager.java     |  36 +-
 .../apache/orc/impl/TestOrcFilterContextImpl.java  |  31 +-
 .../org/apache/orc/impl/TestOrcLargeStripe.java    |  28 +-
 .../test/org/apache/orc/impl/TestOrcWideTable.java |   5 +-
 .../test/org/apache/orc/impl/TestOutStream.java    |  15 +-
 .../org/apache/orc/impl/TestPhysicalFsWriter.java  |   4 +-
 .../orc/impl/TestPredicatePushDownBounds.java      |   4 +-
 .../test/org/apache/orc/impl/TestReaderImpl.java   |  23 +-
 .../org/apache/orc/impl/TestRecordReaderImpl.java  |   7 +-
 .../apache/orc/impl/TestRunLengthByteReader.java   |   4 +-
 .../orc/impl/TestRunLengthIntegerReader.java       |   4 +-
 .../org/apache/orc/impl/TestSchemaEvolution.java   | 429 +++++++++++----------
 .../apache/orc/impl/TestSerializationUtils.java    |  17 +-
 .../test/org/apache/orc/impl/TestStreamName.java   |   6 +-
 .../orc/impl/TestStringHashTableDictionary.java    |   5 +-
 .../apache/orc/impl/TestStringRedBlackTree.java    |   4 +-
 .../test/org/apache/orc/impl/TestWriterImpl.java   |  20 +-
 .../src/test/org/apache/orc/impl/TestZlib.java     |   6 +-
 .../src/test/org/apache/orc/impl/TestZstd.java     |   4 +-
 .../org/apache/orc/impl/mask/TestDataMask.java     |  28 +-
 .../org/apache/orc/impl/mask/TestRedactMask.java   |  20 +-
 .../org/apache/orc/impl/mask/TestSHA256Mask.java   |   6 +-
 .../org/apache/orc/impl/mask/TestUnmaskRange.java  |   4 +-
 .../impl/reader/TestReaderEncryptionVariant.java   |   4 +-
 .../test/org/apache/orc/util/TestBloomFilter.java  |   6 +-
 .../src/test/org/apache/orc/util/TestMurmur3.java  |   4 +-
 .../src/test/org/apache/orc/util/TestOrcUtils.java |   4 +-
 .../orc/util/TestStreamWrapperFileSystem.java      |  12 +-
 java/pom.xml                                       |  16 +-
 java/shims/pom.xml                                 |   2 +-
 68 files changed, 1000 insertions(+), 1239 deletions(-)

diff --git a/java/core/pom.xml b/java/core/pom.xml
index 0f30363..644f2c7 100644
--- a/java/core/pom.xml
+++ b/java/core/pom.xml
@@ -88,7 +88,7 @@
     </dependency>
     <dependency>
       <groupId>org.junit.jupiter</groupId>
-      <artifactId>junit-jupiter-engine</artifactId>
+      <artifactId>junit-jupiter-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
@@ -97,23 +97,13 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.junit.vintage</groupId>
-      <artifactId>junit-vintage-engine</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.objenesis</groupId>
-      <artifactId>objenesis</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>net.bytebuddy</groupId>
-      <artifactId>byte-buddy</artifactId>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-junit-jupiter</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/java/core/src/test/org/apache/orc/StringDictTestingUtils.java b/java/core/src/test/org/apache/orc/StringDictTestingUtils.java
index cbe22eb..ec79e4f 100644
--- a/java/core/src/test/org/apache/orc/StringDictTestingUtils.java
+++ b/java/core/src/test/org/apache/orc/StringDictTestingUtils.java
@@ -23,7 +23,7 @@ import java.nio.charset.StandardCharsets;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.orc.impl.Dictionary;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 
 /**
@@ -54,8 +54,8 @@ public class StringDictTestingUtils {
     public void visit(Dictionary.VisitorContext context)
         throws IOException {
       String word = context.getText().toString();
-      assertEquals("in word " + current, words[current], word);
-      assertEquals("in word " + current, order[current], context.getOriginalPosition());
+      assertEquals(words[current], word, "in word " + current);
+      assertEquals(order[current], context.getOriginalPosition(), "in word " + current);
       buffer.reset();
       context.writeBytes(buffer);
       assertEquals(word, new String(buffer.getData(), 0, buffer.getLength(), StandardCharsets.UTF_8));
diff --git a/java/core/src/test/org/apache/orc/TestColumnStatistics.java b/java/core/src/test/org/apache/orc/TestColumnStatistics.java
index 5e87eaa..0131a5f 100644
--- a/java/core/src/test/org/apache/orc/TestColumnStatistics.java
+++ b/java/core/src/test/org/apache/orc/TestColumnStatistics.java
@@ -29,10 +29,6 @@ import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.orc.impl.ColumnStatisticsImpl;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
 
 import java.io.File;
 import java.math.BigDecimal;
@@ -42,8 +38,8 @@ import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.TimeZone;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  * Test ColumnStatisticsImpl for ORC.
@@ -143,9 +139,10 @@ public class TestColumnStatistics {
     final StringColumnStatistics typed = (StringColumnStatistics) stats1;
     final StringColumnStatistics typed2 = (StringColumnStatistics) stats2;
 
-    assertTrue("Upperbound cannot be more than 1024 bytes",
-        1024 >= typed.getUpperBound().getBytes(StandardCharsets.UTF_8).length);
-    assertTrue("Lowerbound cannot be more than 1024 bytes",1024 >= typed.getLowerBound().getBytes(StandardCharsets.UTF_8).length);
+    assertTrue(1024 >= typed.getUpperBound().getBytes(StandardCharsets.UTF_8).length,
+        "Upperbound cannot be more than 1024 bytes");
+    assertTrue(1024 >= typed.getLowerBound().getBytes(StandardCharsets.UTF_8).length,
+        "Lowerbound cannot be more than 1024 bytes");
 
     assertEquals(null, typed.getMinimum());
     assertEquals(null, typed.getMaximum());
@@ -157,8 +154,10 @@ public class TestColumnStatistics {
     stats1.updateString(test.getBytes(StandardCharsets.UTF_8), 0,
         test.getBytes(StandardCharsets.UTF_8).length, 0);
 
-    assertTrue("Lowerbound cannot be more than 1024 bytes", 1024 >= typed.getLowerBound().getBytes(StandardCharsets.UTF_8).length);
-    assertTrue("Upperbound cannot be more than 1024 bytes", 1024 >= typed.getUpperBound().getBytes(StandardCharsets.UTF_8).length);
+    assertTrue(1024 >= typed.getLowerBound().getBytes(StandardCharsets.UTF_8).length,
+        "Lowerbound cannot be more than 1024 bytes");
+    assertTrue(1024 >= typed.getUpperBound().getBytes(StandardCharsets.UTF_8).length,
+            "Upperbound cannot be more than 1024 bytes");
 
     assertEquals(null, typed.getMinimum());
     assertEquals(null, typed.getMaximum());
@@ -679,8 +678,10 @@ public class TestColumnStatistics {
     Reader reader = OrcFile.createReader(testFilePath,
         OrcFile.readerOptions(conf).filesystem(fs));
     DecimalColumnStatistics statistics = (DecimalColumnStatistics) reader.getStatistics()[0];
-    assertEquals("Incorrect maximum value", new BigDecimal("-99999.99"), statistics.getMinimum().bigDecimalValue());
-    assertEquals("Incorrect minimum value", new BigDecimal("-88888.88"), statistics.getMaximum().bigDecimalValue());
+    assertEquals(new BigDecimal("-99999.99"), statistics.getMinimum().bigDecimalValue(),
+        "Incorrect maximum value");
+    assertEquals(new BigDecimal("-88888.88"), statistics.getMaximum().bigDecimalValue(),
+        "Incorrect minimum value");
   }
 
 
@@ -691,15 +692,13 @@ public class TestColumnStatistics {
   FileSystem fs;
   Path testFilePath;
 
-  @Rule
-  public TestName testCaseName = new TestName();
-
-  @Before
-  public void openFileSystem() throws Exception {
+  @BeforeEach
+  public void openFileSystem(TestInfo testInfo) throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     fs.setWorkingDirectory(workDir);
-    testFilePath = new Path("TestOrcFile." + testCaseName.getMethodName() + ".orc");
+    testFilePath = new Path(
+        "TestOrcFile." + testInfo.getTestMethod().get().getName() + ".orc");
     fs.delete(testFilePath, false);
   }
 }
diff --git a/java/core/src/test/org/apache/orc/TestCorruptTypes.java b/java/core/src/test/org/apache/orc/TestCorruptTypes.java
index 0864c89..02bc699 100644
--- a/java/core/src/test/org/apache/orc/TestCorruptTypes.java
+++ b/java/core/src/test/org/apache/orc/TestCorruptTypes.java
@@ -17,13 +17,11 @@
  */
 package org.apache.orc;
 
-import org.junit.Test;
-
 import java.util.ArrayList;
 import java.util.List;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class TestCorruptTypes {
 
diff --git a/java/core/src/test/org/apache/orc/TestInMemoryKeystore.java b/java/core/src/test/org/apache/orc/TestInMemoryKeystore.java
index 2e47650..1c9f006 100644
--- a/java/core/src/test/org/apache/orc/TestInMemoryKeystore.java
+++ b/java/core/src/test/org/apache/orc/TestInMemoryKeystore.java
@@ -21,9 +21,8 @@ import java.nio.charset.StandardCharsets;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.orc.impl.HadoopShims;
 import org.apache.orc.impl.LocalKey;
-import static org.junit.Assert.*;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 import java.io.IOException;
 import java.security.Key;
@@ -40,7 +39,7 @@ public class TestInMemoryKeystore {
     super();
   }
 
-  @Before
+  @BeforeEach
   public void init() throws IOException {
     // For testing, use a fixed random number generator so that everything
     // is repeatable.
diff --git a/java/core/src/test/org/apache/orc/TestNewIntegerEncoding.java b/java/core/src/test/org/apache/orc/TestNewIntegerEncoding.java
index a670e0e..dbc00a6 100644
--- a/java/core/src/test/org/apache/orc/TestNewIntegerEncoding.java
+++ b/java/core/src/test/org/apache/orc/TestNewIntegerEncoding.java
@@ -17,14 +17,16 @@
  */
 package org.apache.orc;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.*;
 
 import java.io.File;
 import java.sql.Timestamp;
-import java.util.Arrays;
-import java.util.Collection;
 import java.util.List;
 import java.util.Random;
+import java.util.stream.Stream;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -32,31 +34,16 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
 
 import com.google.common.collect.Lists;
 import com.google.common.primitives.Longs;
 
-@RunWith(value = Parameterized.class)
 public class TestNewIntegerEncoding {
 
-  private OrcFile.EncodingStrategy encodingStrategy;
-
-  public TestNewIntegerEncoding( OrcFile.EncodingStrategy es) {
-    this.encodingStrategy = es;
-  }
-
-  @Parameters
-  public static Collection<Object[]> data() {
-    Object[][] data = new Object[][] { {  OrcFile.EncodingStrategy.COMPRESSION },
-        {  OrcFile.EncodingStrategy.SPEED } };
-    return Arrays.asList(data);
+  private static Stream<Arguments> data() {
+    return Stream.of(
+        Arguments.of(OrcFile.EncodingStrategy.COMPRESSION),
+        Arguments.of(OrcFile.EncodingStrategy.SPEED));
   }
 
   public static TypeDescription getRowSchema() {
@@ -85,20 +72,18 @@ public class TestNewIntegerEncoding {
   FileSystem fs;
   Path testFilePath;
 
-  @Rule
-  public TestName testCaseName = new TestName();
-
-  @Before
-  public void openFileSystem() throws Exception {
+  @BeforeEach
+  public void openFileSystem(TestInfo testInfo) throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestOrcFile."
-        + testCaseName.getMethodName() + ".orc");
+        + testInfo.getTestMethod().get().getName() + ".orc");
     fs.delete(testFilePath, false);
   }
 
-  @Test
-  public void testBasicRow() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testBasicRow(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema= getRowSchema();
     Writer writer = OrcFile.createWriter(testFilePath,
                                          OrcFile.writerOptions(conf)
@@ -126,8 +111,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testBasicOld() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testBasicOld(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
     long[] inp = new long[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 3, 4, 5, 6,
         7, 8, 9, 10, 1, 1, 1, 1, 1, 1, 10, 9, 7, 6, 5, 4, 3, 2, 1, 1, 1, 1, 1,
@@ -162,8 +148,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testBasicNew() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testBasicNew(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     long[] inp = new long[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 3, 4, 5, 6,
@@ -200,8 +187,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testBasicDelta1() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testBasicDelta1(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     long[] inp = new long[] { -500, -400, -350, -325, -310 };
@@ -234,8 +222,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testBasicDelta2() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testBasicDelta2(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     long[] inp = new long[] { -500, -600, -650, -675, -710 };
@@ -268,8 +257,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testBasicDelta3() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testBasicDelta3(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     long[] inp = new long[] { 500, 400, 350, 325, 310 };
@@ -302,8 +292,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testBasicDelta4() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testBasicDelta4(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     long[] inp = new long[] { 500, 600, 650, 675, 710 };
@@ -434,8 +425,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testIntegerMin() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testIntegerMin(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     List<Long> input = Lists.newArrayList();
@@ -467,8 +459,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testIntegerMax() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testIntegerMax(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     List<Long> input = Lists.newArrayList();
@@ -501,8 +494,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testLongMin() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testLongMin(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     List<Long> input = Lists.newArrayList();
@@ -535,8 +529,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testLongMax() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testLongMax(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     List<Long> input = Lists.newArrayList();
@@ -569,8 +564,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testRandomInt() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testRandomInt(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     List<Long> input = Lists.newArrayList();
@@ -606,8 +602,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testRandomLong() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testRandomLong(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     List<Long> input = Lists.newArrayList();
@@ -643,8 +640,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testPatchedBaseNegativeMin() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testPatchedBaseNegativeMin(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     long[] inp = new long[] { 20, 2, 3, 2, 1, 3, 17, 71, 35, 2, 1, 139, 2, 2,
@@ -688,8 +686,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testPatchedBaseNegativeMin2() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testPatchedBaseNegativeMin2(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     long[] inp = new long[] { 20, 2, 3, 2, 1, 3, 17, 71, 35, 2, 1, 139, 2, 2,
@@ -733,8 +732,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testPatchedBaseNegativeMin3() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testPatchedBaseNegativeMin3(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     long[] inp = new long[] { 20, 2, 3, 2, 1, 3, 17, 71, 35, 2, 1, 139, 2, 2,
@@ -778,8 +778,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testPatchedBaseNegativeMin4() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testPatchedBaseNegativeMin4(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     long[] inp = new long[] { 13, 13, 11, 8, 13, 10, 10, 11, 11, 14, 11, 7, 13,
@@ -814,8 +815,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testPatchedBaseAt0() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testPatchedBaseAt0(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     List<Long> input = Lists.newArrayList();
@@ -852,8 +854,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testPatchedBaseAt1() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testPatchedBaseAt1(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     List<Long> input = Lists.newArrayList();
@@ -889,8 +892,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testPatchedBaseAt255() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testPatchedBaseAt255(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     List<Long> input = Lists.newArrayList();
@@ -926,8 +930,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testPatchedBaseAt256() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testPatchedBaseAt256(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     List<Long> input = Lists.newArrayList();
@@ -963,8 +968,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testPatchedBase510() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testPatchedBase510(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     List<Long> input = Lists.newArrayList();
@@ -1000,8 +1006,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testPatchedBase511() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testPatchedBase511(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     List<Long> input = Lists.newArrayList();
@@ -1037,8 +1044,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testPatchedBaseMax1() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testPatchedBaseMax1(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     List<Long> input = Lists.newArrayList();
@@ -1074,8 +1082,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testPatchedBaseMax2() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testPatchedBaseMax2(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     List<Long> input = Lists.newArrayList();
@@ -1113,8 +1122,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testPatchedBaseMax3() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testPatchedBaseMax3(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     List<Long> input = Lists.newArrayList();
@@ -1164,8 +1174,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testPatchedBaseMax4() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testPatchedBaseMax4(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     List<Long> input = Lists.newArrayList();
@@ -1219,8 +1230,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testPatchedBaseTimestamp() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testPatchedBaseTimestamp(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createStruct()
         .addField("ts", TypeDescription.createTimestamp());
 
@@ -1288,8 +1300,9 @@ public class TestNewIntegerEncoding {
     }
   }
 
-  @Test
-  public void testDirectLargeNegatives() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testDirectLargeNegatives(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     Writer writer = OrcFile.createWriter(testFilePath,
@@ -1327,8 +1340,9 @@ public class TestNewIntegerEncoding {
     assertEquals(false, rows.nextBatch(batch));
   }
 
-  @Test
-  public void testSeek() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testSeek(OrcFile.EncodingStrategy encodingStrategy) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     List<Long> input = Lists.newArrayList();
diff --git a/java/core/src/test/org/apache/orc/TestOrcDSTNoTimezone.java b/java/core/src/test/org/apache/orc/TestOrcDSTNoTimezone.java
index 5921aae..28f4d88 100644
--- a/java/core/src/test/org/apache/orc/TestOrcDSTNoTimezone.java
+++ b/java/core/src/test/org/apache/orc/TestOrcDSTNoTimezone.java
@@ -19,65 +19,43 @@ package org.apache.orc;
 
 import java.sql.Timestamp;
 import java.text.SimpleDateFormat;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
 import java.util.TimeZone;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertFalse;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
 
 /**
  * Test over an orc file that does not store time zone information in the footer
  * and it was written from a time zone that observes DST for one of the timestamp
  * values stored ('2014-06-06 12:34:56.0').
  */
-@RunWith(Parameterized.class)
 public class TestOrcDSTNoTimezone {
   Configuration conf;
   FileSystem fs;
-  String readerTimeZone;
   SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.S");
   static TimeZone defaultTimeZone = TimeZone.getDefault();
 
-  public TestOrcDSTNoTimezone(String readerTZ) {
-    this.readerTimeZone = readerTZ;
-  }
-
-  @Parameterized.Parameters
-  public static Collection<Object[]> data() {
-    List<Object[]> result = Arrays.asList(new Object[][]{
-        {"America/Los_Angeles"},
-        {"Europe/Berlin"},
-        {"Asia/Jerusalem"}
-    });
-    return result;
-  }
-
-  @Before
+  @BeforeEach
   public void openFileSystem() throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
   }
 
-  @After
+  @AfterEach
   public void restoreTimeZone() {
     TimeZone.setDefault(defaultTimeZone);
   }
 
-  @Test
-  public void testReadOldTimestampFormat() throws Exception {
+  @ParameterizedTest
+  @ValueSource(strings = {"America/Los_Angeles", "Europe/Berlin", "Asia/Jerusalem"})
+  public void testReadOldTimestampFormat(String readerTimeZone) throws Exception {
     TimeZone.setDefault(TimeZone.getTimeZone(readerTimeZone));
     Path oldFilePath = new Path(getClass().getClassLoader().
         getSystemResource("orc-file-dst-no-timezone.orc").getPath());
diff --git a/java/core/src/test/org/apache/orc/TestOrcFilterContext.java b/java/core/src/test/org/apache/orc/TestOrcFilterContext.java
index 507c800..715cfc7 100644
--- a/java/core/src/test/org/apache/orc/TestOrcFilterContext.java
+++ b/java/core/src/test/org/apache/orc/TestOrcFilterContext.java
@@ -26,16 +26,9 @@ import org.apache.hadoop.hive.ql.exec.vector.MapColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.StructColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.UnionColumnVector;
 import org.apache.orc.impl.OrcFilterContextImpl;
-import org.junit.Before;
-import org.junit.Test;
 
-import static org.hamcrest.CoreMatchers.containsString;
-import static org.hamcrest.CoreMatchers.instanceOf;
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertThrows;
-import static org.junit.Assert.assertTrue;
-import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class TestOrcFilterContext {
   private final TypeDescription schema = TypeDescription.createStruct()
@@ -67,7 +60,7 @@ public class TestOrcFilterContext {
   private final OrcFilterContext filterContext = new OrcFilterContextImpl(schema)
     .setBatch(schema.createRowBatch());
 
-  @Before
+  @BeforeEach
   public void setup() {
     filterContext.reset();
   }
@@ -76,45 +69,45 @@ public class TestOrcFilterContext {
   public void testTopLevelElementaryType() {
     ColumnVector[] vectorBranch = filterContext.findColumnVector("f1");
     assertEquals(1, vectorBranch.length);
-    assertThat(vectorBranch[0], instanceOf(LongColumnVector.class));
+    assertTrue(vectorBranch[0] instanceof LongColumnVector);
   }
 
   @Test
   public void testTopLevelCompositeType() {
     ColumnVector[] vectorBranch = filterContext.findColumnVector("f3");
     assertEquals(1, vectorBranch.length);
-    assertThat(vectorBranch[0], instanceOf(StructColumnVector.class));
+    assertTrue(vectorBranch[0] instanceof StructColumnVector);
 
     vectorBranch = filterContext.findColumnVector("f4");
     assertEquals(1, vectorBranch.length);
-    assertThat(vectorBranch[0], instanceOf(ListColumnVector.class));
+    assertTrue(vectorBranch[0] instanceof ListColumnVector);
 
     vectorBranch = filterContext.findColumnVector("f5");
     assertEquals(1, vectorBranch.length);
-    assertThat(vectorBranch[0], instanceOf(MapColumnVector.class));
+    assertTrue(vectorBranch[0] instanceof MapColumnVector);
 
     vectorBranch = filterContext.findColumnVector("f6");
     assertEquals(1, vectorBranch.length);
-    assertThat(vectorBranch[0], instanceOf(UnionColumnVector.class));
+    assertTrue(vectorBranch[0] instanceof UnionColumnVector);
   }
 
   @Test
   public void testNestedType() {
     ColumnVector[] vectorBranch = filterContext.findColumnVector("f3.a");
     assertEquals(2, vectorBranch.length);
-    assertThat(vectorBranch[0], instanceOf(StructColumnVector.class));
-    assertThat(vectorBranch[1], instanceOf(LongColumnVector.class));
+    assertTrue(vectorBranch[0] instanceof StructColumnVector);
+    assertTrue(vectorBranch[1] instanceof LongColumnVector);
 
     vectorBranch = filterContext.findColumnVector("f3.c");
     assertEquals(2, vectorBranch.length);
-    assertThat(vectorBranch[0], instanceOf(StructColumnVector.class));
-    assertThat(vectorBranch[1], instanceOf(MapColumnVector.class));
+    assertTrue(vectorBranch[0] instanceof StructColumnVector);
+    assertTrue(vectorBranch[1] instanceof MapColumnVector);
 
     vectorBranch = filterContext.findColumnVector("f6.1.b");
     assertEquals(3, vectorBranch.length);
-    assertThat(vectorBranch[0], instanceOf(UnionColumnVector.class));
-    assertThat(vectorBranch[1], instanceOf(StructColumnVector.class));
-    assertThat(vectorBranch[2], instanceOf(ListColumnVector.class));
+    assertTrue(vectorBranch[0] instanceof UnionColumnVector);
+    assertTrue(vectorBranch[1] instanceof StructColumnVector);
+    assertTrue(vectorBranch[2] instanceof ListColumnVector);
   }
 
   @Test
@@ -181,24 +174,24 @@ public class TestOrcFilterContext {
       .setBatch(topListSchema.createRowBatch());
     ColumnVector[] vectorBranch = fc.findColumnVector("_elem");
     assertEquals(2, vectorBranch.length);
-    assertThat(vectorBranch[0], instanceOf(ListColumnVector.class));
-    assertThat(vectorBranch[1], instanceOf(StructColumnVector.class));
+    assertTrue(vectorBranch[0] instanceof ListColumnVector);
+    assertTrue(vectorBranch[1] instanceof StructColumnVector);
   }
 
   @Test
   public void testUnsupportedIsNullUse() {
     ColumnVector[] vectorBranch = filterContext.findColumnVector("f4._elem.a");
     assertEquals(3, vectorBranch.length);
-    assertThat(vectorBranch[0], instanceOf(ListColumnVector.class));
-    assertThat(vectorBranch[1], instanceOf(StructColumnVector.class));
-    assertThat(vectorBranch[2], instanceOf(BytesColumnVector.class));
+    assertTrue(vectorBranch[0] instanceof ListColumnVector);
+    assertTrue(vectorBranch[1] instanceof StructColumnVector);
+    assertTrue(vectorBranch[2] instanceof BytesColumnVector);
 
     assertTrue(OrcFilterContext.noNulls(vectorBranch));
     IllegalArgumentException exception = assertThrows(IllegalArgumentException.class,
                                                       () -> OrcFilterContext.isNull(vectorBranch,
                                                                                     0));
-    assertThat(exception.getMessage(), containsString("ListColumnVector"));
-    assertThat(exception.getMessage(), containsString("List and Map vectors are not supported"));
+    assertTrue(exception.getMessage().contains("ListColumnVector"));
+    assertTrue(exception.getMessage().contains("List and Map vectors are not supported"));
   }
 
   @Test
diff --git a/java/core/src/test/org/apache/orc/TestOrcNoTimezone.java b/java/core/src/test/org/apache/orc/TestOrcNoTimezone.java
index a9d5c0b..e7f060e 100644
--- a/java/core/src/test/org/apache/orc/TestOrcNoTimezone.java
+++ b/java/core/src/test/org/apache/orc/TestOrcNoTimezone.java
@@ -19,64 +19,42 @@ package org.apache.orc;
 
 import java.sql.Timestamp;
 import java.text.SimpleDateFormat;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
 import java.util.TimeZone;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertFalse;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
 
 /**
  * Test over an orc file that does not store time zone information in the footer
  * and it was written from a time zone that does not observe DST.
  */
-@RunWith(Parameterized.class)
 public class TestOrcNoTimezone {
   Configuration conf;
   FileSystem fs;
-  String readerTimeZone;
   SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.S");
   static TimeZone defaultTimeZone = TimeZone.getDefault();
 
-  public TestOrcNoTimezone(String readerTZ) {
-    this.readerTimeZone = readerTZ;
-  }
-
-  @Parameterized.Parameters
-  public static Collection<Object[]> data() {
-    List<Object[]> result = Arrays.asList(new Object[][]{
-        {"GMT-12:00"},
-        {"UTC"},
-        {"GMT+8:00"},
-    });
-    return result;
-  }
-
-  @Before
+  @BeforeEach
   public void openFileSystem() throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
   }
 
-  @After
+  @AfterEach
   public void restoreTimeZone() {
     TimeZone.setDefault(defaultTimeZone);
   }
 
-  @Test
-  public void testReadOldTimestampFormat() throws Exception {
+  @ParameterizedTest
+  @ValueSource(strings = {"GMT-12:00", "UTC", "GMT+8:00"})
+  public void testReadOldTimestampFormat(String readerTimeZone) throws Exception {
     TimeZone.setDefault(TimeZone.getTimeZone(readerTimeZone));
     Path oldFilePath = new Path(getClass().getClassLoader().
         getSystemResource("orc-file-no-timezone.orc").getPath());
@@ -93,18 +71,20 @@ public class TestOrcNoTimezone {
         (reader.options().include(include));
     assertTrue(rows.nextBatch(batch));
     Timestamp timestamp = ts.asScratchTimestamp(0);
-    assertEquals("For timezone : " + TimeZone.getTimeZone(readerTimeZone),
+    assertEquals(
         Timestamp.valueOf("2014-01-01 12:34:56.0").toString(),
-        formatter.format(timestamp));
+        formatter.format(timestamp),
+        "For timezone : " + TimeZone.getTimeZone(readerTimeZone));
 
     // check the contents of second row
     rows.seekToRow(1);
     assertTrue(rows.nextBatch(batch));
     assertEquals(1, batch.size);
     timestamp = ts.asScratchTimestamp(0);
-    assertEquals("For timezone : " + TimeZone.getTimeZone(readerTimeZone),
+    assertEquals(
         Timestamp.valueOf("2014-06-06 12:34:56.0").toString(),
-        formatter.format(timestamp));
+        formatter.format(timestamp),
+        "For timezone : " + TimeZone.getTimeZone(readerTimeZone));
 
     // handle the close up
     assertFalse(rows.nextBatch(batch));
diff --git a/java/core/src/test/org/apache/orc/TestOrcNullOptimization.java b/java/core/src/test/org/apache/orc/TestOrcNullOptimization.java
index 60cd0d4..c9cacdd 100644
--- a/java/core/src/test/org/apache/orc/TestOrcNullOptimization.java
+++ b/java/core/src/test/org/apache/orc/TestOrcNullOptimization.java
@@ -17,9 +17,12 @@
  */
 package org.apache.orc;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
+
 import static org.apache.orc.TestVectorOrcFile.assertEmptyStats;
-import static org.junit.Assert.assertArrayEquals;
 
 import java.io.File;
 import java.io.IOException;
@@ -37,10 +40,6 @@ import org.apache.hadoop.hive.ql.exec.vector.StructColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 
 import org.apache.orc.impl.RecordReaderImpl;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
 
 import com.google.common.collect.Lists;
 
@@ -108,15 +107,12 @@ public class TestOrcNullOptimization {
   FileSystem fs;
   Path testFilePath;
 
-  @Rule
-  public TestName testCaseName = new TestName();
-
-  @Before
-  public void openFileSystem() throws Exception {
+  @BeforeEach
+  public void openFileSystem(TestInfo testInfo) throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestOrcNullOptimization." +
-        testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     fs.delete(testFilePath, false);
   }
 
diff --git a/java/core/src/test/org/apache/orc/TestOrcTimestampPPD.java b/java/core/src/test/org/apache/orc/TestOrcTimestampPPD.java
index 56a6bb4..40df981 100644
--- a/java/core/src/test/org/apache/orc/TestOrcTimestampPPD.java
+++ b/java/core/src/test/org/apache/orc/TestOrcTimestampPPD.java
@@ -27,11 +27,6 @@ import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentImpl;
 import org.apache.orc.impl.RecordReaderImpl;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
 
 import java.io.File;
 import java.io.IOException;
@@ -39,7 +34,8 @@ import java.sql.Timestamp;
 import java.util.List;
 import java.util.TimeZone;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class TestOrcTimestampPPD {
   Path workDir =
@@ -52,18 +48,16 @@ public class TestOrcTimestampPPD {
   public TestOrcTimestampPPD() {
   }
 
-  @Rule
-  public TestName testCaseName = new TestName();
-
-  @Before
-  public void openFileSystem() throws Exception {
+  @BeforeEach
+  public void openFileSystem(TestInfo testInfo) throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
-    testFilePath = new Path(workDir, "TestOrcTimestampPPD." + testCaseName.getMethodName() + ".orc");
+    testFilePath = new Path(workDir,
+        "TestOrcTimestampPPD." + testInfo.getTestMethod().get().getName() + ".orc");
     fs.delete(testFilePath, false);
   }
 
-  @After
+  @AfterEach
   public void restoreTimeZone() {
     TimeZone.setDefault(defaultTimeZone);
   }
diff --git a/java/core/src/test/org/apache/orc/TestOrcTimezone1.java b/java/core/src/test/org/apache/orc/TestOrcTimezone1.java
index 1f47e9e..95454c0 100644
--- a/java/core/src/test/org/apache/orc/TestOrcTimezone1.java
+++ b/java/core/src/test/org/apache/orc/TestOrcTimezone1.java
@@ -17,97 +17,79 @@
  */
 package org.apache.orc;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.*;
 
 import java.io.File;
 import java.sql.Timestamp;
-import java.util.Arrays;
-import java.util.Collection;
 import java.util.List;
 import java.util.TimeZone;
+import java.util.stream.Stream;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
 
 import com.google.common.collect.Lists;
 
 /**
  *
  */
-@RunWith(Parameterized.class)
 public class TestOrcTimezone1 {
   Path workDir = new Path(System.getProperty("test.tmp.dir",
       "target" + File.separator + "test" + File.separator + "tmp"));
   Configuration conf;
   FileSystem fs;
   Path testFilePath;
-  String writerTimeZone;
-  String readerTimeZone;
   static TimeZone defaultTimeZone = TimeZone.getDefault();
 
-  public TestOrcTimezone1(String writerTZ, String readerTZ) {
-    this.writerTimeZone = writerTZ;
-    this.readerTimeZone = readerTZ;
-  }
-
-  @Parameterized.Parameters
-  public static Collection<Object[]> data() {
-    List<Object[]> result = Arrays.asList(new Object[][]{
-        /* Extreme timezones */
-        {"GMT-12:00", "GMT+14:00"},
+  private static Stream<Arguments> data() {
+    return Stream.of(/* Extreme timezones */
+        Arguments.of("GMT-12:00", "GMT+14:00"),
         /* No difference in DST */
-        {"America/Los_Angeles", "America/Los_Angeles"}, /* same timezone both with DST */
-        {"Europe/Berlin", "Europe/Berlin"}, /* same as above but europe */
-        {"America/Phoenix", "Asia/Kolkata"} /* Writer no DST, Reader no DST */,
-        {"Europe/Berlin", "America/Los_Angeles"} /* Writer DST, Reader DST */,
-        {"Europe/Berlin", "America/Chicago"} /* Writer DST, Reader DST */,
+        Arguments.of("America/Los_Angeles", "America/Los_Angeles"), /* same timezone both with DST */
+        Arguments.of("Europe/Berlin", "Europe/Berlin"), /* same as above but europe */
+        Arguments.of("America/Phoenix", "Asia/Kolkata") /* Writer no DST, Reader no DST */,
+        Arguments.of("Europe/Berlin", "America/Los_Angeles") /* Writer DST, Reader DST */,
+        Arguments.of("Europe/Berlin", "America/Chicago") /* Writer DST, Reader DST */,
         /* With DST difference */
-        {"Europe/Berlin", "UTC"},
-        {"UTC", "Europe/Berlin"} /* Writer no DST, Reader DST */,
-        {"America/Los_Angeles", "Asia/Kolkata"} /* Writer DST, Reader no DST */,
-        {"Europe/Berlin", "Asia/Kolkata"} /* Writer DST, Reader no DST */,
+        Arguments.of("Europe/Berlin", "UTC"),
+        Arguments.of("UTC", "Europe/Berlin") /* Writer no DST, Reader DST */,
+        Arguments.of("America/Los_Angeles", "Asia/Kolkata") /* Writer DST, Reader no DST */,
+        Arguments.of("Europe/Berlin", "Asia/Kolkata") /* Writer DST, Reader no DST */,
         /* Timezone offsets for the reader has changed historically */
-        {"Asia/Saigon", "Pacific/Enderbury"},
-        {"UTC", "Asia/Jerusalem"},
+        Arguments.of("Asia/Saigon", "Pacific/Enderbury"),
+        Arguments.of("UTC", "Asia/Jerusalem")
 
         // NOTE:
         // "1995-01-01 03:00:00.688888888" this is not a valid time in Pacific/Enderbury timezone.
         // On 1995-01-01 00:00:00 GMT offset moved from -11:00 hr to +13:00 which makes all values
         // on 1995-01-01 invalid. Try this with joda time
         // new MutableDateTime("1995-01-01", DateTimeZone.forTimeZone(readerTimeZone));
-    });
-    return result;
+    );
   }
 
-  @Rule
-  public TestName testCaseName = new TestName();
-
-  @Before
-  public void openFileSystem() throws Exception {
+  @BeforeEach
+  public void openFileSystem(TestInfo testInfo) throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestOrcFile." +
-        testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     fs.delete(testFilePath, false);
   }
 
-  @After
+  @AfterEach
   public void restoreTimeZone() {
     TimeZone.setDefault(defaultTimeZone);
   }
 
-  @Test
-  public void testTimestampWriter() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testTimestampWriter(String writerTimeZone, String readerTimeZone) throws Exception {
     TypeDescription schema = TypeDescription.createTimestamp();
 
     TimeZone.setDefault(TimeZone.getTimeZone(writerTimeZone));
@@ -153,8 +135,9 @@ public class TestOrcTimezone1 {
     rows.close();
   }
 
-  @Test
-  public void testReadTimestampFormat_0_11() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testReadTimestampFormat_0_11(String writerTimeZone, String readerTimeZone) throws Exception {
     TimeZone.setDefault(TimeZone.getTimeZone(readerTimeZone));
     Path oldFilePath = new Path(getClass().getClassLoader().
         getSystemResource("orc-file-11-format.orc").getPath());
diff --git a/java/core/src/test/org/apache/orc/TestOrcTimezone2.java b/java/core/src/test/org/apache/orc/TestOrcTimezone2.java
index 32fc838..7319e76 100644
--- a/java/core/src/test/org/apache/orc/TestOrcTimezone2.java
+++ b/java/core/src/test/org/apache/orc/TestOrcTimezone2.java
@@ -17,86 +17,68 @@
  */
 package org.apache.orc;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.*;
 
 import java.io.File;
 import java.sql.Timestamp;
-import java.util.Arrays;
-import java.util.Collection;
 import java.util.List;
 import java.util.Random;
 import java.util.TimeZone;
+import java.util.stream.Stream;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
 
 import com.google.common.collect.Lists;
 
 /**
  *
  */
-@RunWith(Parameterized.class)
 public class TestOrcTimezone2 {
   Path workDir = new Path(System.getProperty("test.tmp.dir",
       "target" + File.separator + "test" + File.separator + "tmp"));
   Configuration conf;
   FileSystem fs;
   Path testFilePath;
-  String writerTimeZone;
-  String readerTimeZone;
   static TimeZone defaultTimeZone = TimeZone.getDefault();
 
-  public TestOrcTimezone2(String writerTZ, String readerTZ) {
-    this.writerTimeZone = writerTZ;
-    this.readerTimeZone = readerTZ;
-  }
-
-  @Parameterized.Parameters
-  public static Collection<Object[]> data() {
+  private static Stream<Arguments> data() {
     String[] allTimeZones = TimeZone.getAvailableIDs();
     Random rand = new Random(123);
     int len = allTimeZones.length;
     int n = 500;
-    Object[][] data = new Object[n][];
+    Arguments[] data = new Arguments[n];
     for (int i = 0; i < n; i++) {
       int wIdx = rand.nextInt(len);
       int rIdx = rand.nextInt(len);
-      data[i] = new Object[2];
-      data[i][0] = allTimeZones[wIdx];
-      data[i][1] = allTimeZones[rIdx];
+      data[i] = Arguments.of(allTimeZones[wIdx], allTimeZones[rIdx]);
     }
-    return Arrays.asList(data);
+    return Stream.of(data);
   }
 
-  @Rule
-  public TestName testCaseName = new TestName();
-
-  @Before
-  public void openFileSystem() throws Exception {
+  @BeforeEach
+  public void openFileSystem(TestInfo testInfo) throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestOrcFile." +
-        testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     fs.delete(testFilePath, false);
   }
 
-  @After
+  @AfterEach
   public void restoreTimeZone() {
     TimeZone.setDefault(defaultTimeZone);
   }
 
-  @Test
-  public void testTimestampWriter() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testTimestampWriter(String writerTimeZone, String readerTimeZone) throws Exception {
     TypeDescription schema = TypeDescription.createTimestamp();
 
     TimeZone.setDefault(TimeZone.getTimeZone(writerTimeZone));
diff --git a/java/core/src/test/org/apache/orc/TestOrcTimezone3.java b/java/core/src/test/org/apache/orc/TestOrcTimezone3.java
index 513e92e..f607de8 100644
--- a/java/core/src/test/org/apache/orc/TestOrcTimezone3.java
+++ b/java/core/src/test/org/apache/orc/TestOrcTimezone3.java
@@ -17,76 +17,57 @@
  */
 package org.apache.orc;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.*;
 
 import java.io.File;
 import java.sql.Timestamp;
-import java.util.Arrays;
-import java.util.Collection;
 import java.util.List;
 import java.util.TimeZone;
+import java.util.stream.Stream;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
 
 import com.google.common.collect.Lists;
 
 /**
  *
  */
-@RunWith(Parameterized.class)
 public class TestOrcTimezone3 {
   Path workDir = new Path(System.getProperty("test.tmp.dir",
       "target" + File.separator + "test" + File.separator + "tmp"));
   Configuration conf;
   FileSystem fs;
   Path testFilePath;
-  String writerTimeZone;
-  String readerTimeZone;
   static TimeZone defaultTimeZone = TimeZone.getDefault();
 
-  public TestOrcTimezone3(String writerTZ, String readerTZ) {
-    this.writerTimeZone = writerTZ;
-    this.readerTimeZone = readerTZ;
+  private static Stream<Arguments> data() {
+    return Stream.of(Arguments.of("America/Chicago", "America/Los_Angeles"));
   }
 
-  @Parameterized.Parameters
-  public static Collection<Object[]> data() {
-    List<Object[]> result = Arrays.asList(new Object[][]{
-        {"America/Chicago", "America/Los_Angeles"},
-    });
-    return result;
-  }
-
-  @Rule
-  public TestName testCaseName = new TestName();
-
-  @Before
-  public void openFileSystem() throws Exception {
+  @BeforeEach
+  public void openFileSystem(TestInfo testInfo) throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestOrcTimezone3." +
-        testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     fs.delete(testFilePath, false);
   }
 
-  @After
+  @AfterEach
   public void restoreTimeZone() {
     TimeZone.setDefault(defaultTimeZone);
   }
 
-  @Test
-  public void testTimestampWriter() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testTimestampWriter(String writerTimeZone, String readerTimeZone) throws Exception {
     TypeDescription schema = TypeDescription.createTimestamp();
 
     TimeZone.setDefault(TimeZone.getTimeZone(writerTimeZone));
diff --git a/java/core/src/test/org/apache/orc/TestOrcTimezone4.java b/java/core/src/test/org/apache/orc/TestOrcTimezone4.java
index 167759e..097adbd 100644
--- a/java/core/src/test/org/apache/orc/TestOrcTimezone4.java
+++ b/java/core/src/test/org/apache/orc/TestOrcTimezone4.java
@@ -23,11 +23,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
 
 import java.io.File;
 import java.sql.Timestamp;
@@ -35,7 +30,8 @@ import java.text.SimpleDateFormat;
 import java.util.List;
 import java.util.TimeZone;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.api.*;
 
 /**
  *
@@ -52,19 +48,16 @@ public class TestOrcTimezone4 {
   public TestOrcTimezone4() {
   }
 
-  @Rule
-  public TestName testCaseName = new TestName();
-
-  @Before
-  public void openFileSystem() throws Exception {
+  @BeforeEach
+  public void openFileSystem(TestInfo testInfo) throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestOrcTimezone4." +
-        testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     fs.delete(testFilePath, false);
   }
 
-  @After
+  @AfterEach
   public void restoreTimeZone() {
     TimeZone.setDefault(defaultTimeZone);
   }
diff --git a/java/core/src/test/org/apache/orc/TestOrcTimezonePPD.java b/java/core/src/test/org/apache/orc/TestOrcTimezonePPD.java
index 2756fc9..1c7d7bd 100644
--- a/java/core/src/test/org/apache/orc/TestOrcTimezonePPD.java
+++ b/java/core/src/test/org/apache/orc/TestOrcTimezonePPD.java
@@ -15,16 +15,19 @@
  */
 package org.apache.orc;
 
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.*;
 
 import java.io.File;
 import java.sql.Timestamp;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.Arrays;
-import java.util.Collection;
 import java.util.List;
 import java.util.TimeZone;
+import java.util.stream.Stream;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -40,13 +43,6 @@ import org.apache.orc.impl.RecordReaderImpl;
 import org.apache.orc.impl.SerializationUtils;
 import org.apache.orc.util.BloomFilter;
 import org.apache.orc.util.BloomFilterIO;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
 
 import com.google.common.collect.Lists;
 import org.slf4j.Logger;
@@ -55,7 +51,6 @@ import org.slf4j.LoggerFactory;
 /**
  *
  */
-@RunWith(Parameterized.class)
 public class TestOrcTimezonePPD {
   private static final Logger LOG = LoggerFactory.getLogger(TestOrcTimezonePPD.class);
 
@@ -64,55 +59,42 @@ public class TestOrcTimezonePPD {
   Configuration conf;
   FileSystem fs;
   Path testFilePath;
-  String writerTimeZone;
-  String readerTimeZone;
   static TimeZone defaultTimeZone = TimeZone.getDefault();
   TimeZone utcTz = TimeZone.getTimeZone("UTC");
   DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
 
-  public TestOrcTimezonePPD(String writerTZ, String readerTZ) {
-    this.writerTimeZone = writerTZ;
-    this.readerTimeZone = readerTZ;
-  }
-
-  @Parameterized.Parameters
-  public static Collection<Object[]> data() {
-    List<Object[]> result = Arrays.asList(new Object[][]{
-      {"US/Eastern", "America/Los_Angeles"},
-      {"US/Eastern", "UTC"},
+  private static Stream<Arguments> data() {
+    return Stream.of(
+        Arguments.of("US/Eastern", "America/Los_Angeles"),
+        Arguments.of("US/Eastern", "UTC"),
         /* Extreme timezones */
-      {"GMT-12:00", "GMT+14:00"},
+        Arguments.of("GMT-12:00", "GMT+14:00"),
         /* No difference in DST */
-      {"America/Los_Angeles", "America/Los_Angeles"}, /* same timezone both with DST */
-      {"Europe/Berlin", "Europe/Berlin"}, /* same as above but europe */
-      {"America/Phoenix", "Asia/Kolkata"} /* Writer no DST, Reader no DST */,
-      {"Europe/Berlin", "America/Los_Angeles"} /* Writer DST, Reader DST */,
-      {"Europe/Berlin", "America/Chicago"} /* Writer DST, Reader DST */,
+        Arguments.of("America/Los_Angeles", "America/Los_Angeles"), /* same timezone both with DST */
+        Arguments.of("Europe/Berlin", "Europe/Berlin"), /* same as above but europe */
+        Arguments.of("America/Phoenix", "Asia/Kolkata") /* Writer no DST, Reader no DST */,
+        Arguments.of("Europe/Berlin", "America/Los_Angeles") /* Writer DST, Reader DST */,
+        Arguments.of("Europe/Berlin", "America/Chicago") /* Writer DST, Reader DST */,
         /* With DST difference */
-      {"Europe/Berlin", "UTC"},
-      {"UTC", "Europe/Berlin"} /* Writer no DST, Reader DST */,
-      {"America/Los_Angeles", "Asia/Kolkata"} /* Writer DST, Reader no DST */,
-      {"Europe/Berlin", "Asia/Kolkata"} /* Writer DST, Reader no DST */,
+        Arguments.of("Europe/Berlin", "UTC"),
+        Arguments.of("UTC", "Europe/Berlin") /* Writer no DST, Reader DST */,
+        Arguments.of("America/Los_Angeles", "Asia/Kolkata") /* Writer DST, Reader no DST */,
+        Arguments.of("Europe/Berlin", "Asia/Kolkata") /* Writer DST, Reader no DST */,
         /* Timezone offsets for the reader has changed historically */
-      {"Asia/Saigon", "Pacific/Enderbury"},
-      {"UTC", "Asia/Jerusalem"},
-    });
-    return result;
+        Arguments.of("Asia/Saigon", "Pacific/Enderbury"),
+        Arguments.of("UTC", "Asia/Jerusalem"));
   }
 
-  @Rule
-  public TestName testCaseName = new TestName();
-
-  @Before
-  public void openFileSystem() throws Exception {
+  @BeforeEach
+  public void openFileSystem(TestInfo testInfo) throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestOrcFile." +
-      testCaseName.getMethodName() + ".orc");
+      testInfo.getTestMethod().get().getName() + ".orc");
     fs.delete(testFilePath, false);
   }
 
-  @After
+  @AfterEach
   public void restoreTimeZone() {
     TimeZone.setDefault(defaultTimeZone);
   }
@@ -126,8 +108,9 @@ public class TestOrcTimezonePPD {
       literal, literalList);
   }
 
-  @Test
-  public void testTimestampPPDMinMax() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testTimestampPPDMinMax(String writerTimeZone, String readerTimeZone) throws Exception {
     TypeDescription schema = TypeDescription.createTimestamp();
 
     TimeZone.setDefault(TimeZone.getTimeZone(writerTimeZone));
@@ -210,8 +193,9 @@ public class TestOrcTimezonePPD {
     return result.build();
   }
 
-  @Test
-  public void testTimestampPPDBloomFilter() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testTimestampPPDBloomFilter(String writerTimeZone, String readerTimeZone) throws Exception {
     LOG.info("Writer = " + writerTimeZone + " reader = " + readerTimeZone);
     TypeDescription schema = TypeDescription.createStruct().addField("ts", TypeDescription.createTimestamp());
 
@@ -283,8 +267,9 @@ public class TestOrcTimezonePPD {
       bf));
   }
 
-  @Test
-  public void testTimestampMinMaxAndBloomFilter() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testTimestampMinMaxAndBloomFilter(String writerTimeZone, String readerTimeZone) throws Exception {
     TypeDescription schema = TypeDescription.createStruct().addField("ts", TypeDescription.createTimestamp());
 
     TimeZone.setDefault(TimeZone.getTimeZone(writerTimeZone));
@@ -357,8 +342,9 @@ public class TestOrcTimezonePPD {
     assertEquals(SearchArgument.TruthValue.NO, RecordReaderImpl.evaluatePredicate(colStats[1], pred, bf));
   }
 
-  @Test
-  public void testTimestampAllNulls() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testTimestampAllNulls(String writerTimeZone, String readerTimeZone) throws Exception {
     TypeDescription schema = TypeDescription.createStruct().addField("ts", TypeDescription.createTimestamp());
 
     TimeZone.setDefault(TimeZone.getTimeZone(writerTimeZone));
diff --git a/java/core/src/test/org/apache/orc/TestProlepticConversions.java b/java/core/src/test/org/apache/orc/TestProlepticConversions.java
index 688be7a..0066204 100644
--- a/java/core/src/test/org/apache/orc/TestProlepticConversions.java
+++ b/java/core/src/test/org/apache/orc/TestProlepticConversions.java
@@ -31,12 +31,6 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.orc.impl.DateUtils;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
 
 import java.io.File;
 import java.nio.charset.StandardCharsets;
@@ -48,34 +42,27 @@ import java.util.GregorianCalendar;
 import java.util.List;
 import java.util.TimeZone;
 import java.util.concurrent.TimeUnit;
+import java.util.stream.Stream;
+
 import org.threeten.extra.chrono.HybridChronology;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.*;
 
 /**
  * This class tests all of the combinations of reading and writing the hybrid
  * and proleptic calendars.
  */
-@RunWith(Parameterized.class)
 public class TestProlepticConversions {
 
-  @Parameterized.Parameter
-  public boolean writerProlepticGregorian;
-
-  @Parameterized.Parameter(1)
-  public boolean readerProlepticGregorian;
-
-  @Parameterized.Parameters
-  public static Collection<Object[]> getParameters() {
-    List<Object[]> result = new ArrayList<>();
-    final boolean[] BOOLEANS = new boolean[]{false, true};
-    for(Boolean writer: BOOLEANS) {
-      for (Boolean reader: BOOLEANS) {
-        result.add(new Object[]{writer, reader});
-      }
-    }
-    return result;
+  private static Stream<Arguments> data() {
+    return Stream.of(
+        Arguments.of(false, false),
+        Arguments.of(false, true),
+        Arguments.of(true, false),
+        Arguments.of(true, true));
   }
 
   private Path workDir = new Path(System.getProperty("test.tmp.dir",
@@ -95,14 +82,11 @@ public class TestProlepticConversions {
   private FileSystem fs;
   private Path testFilePath;
 
-  @Rule
-  public TestName testCaseName = new TestName();
-
-  @Before
-  public void setupPath() throws Exception {
+  @BeforeEach
+  public void setupPath(TestInfo testInfo) throws Exception {
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestProlepticConversion." +
-       testCaseName.getMethodName().replaceFirst("\\[[0-9]+]", "") + ".orc");
+       testInfo.getTestMethod().get().getName().replaceFirst("\\[[0-9]+]", "") + ".orc");
     fs.delete(testFilePath, false);
   }
 
@@ -112,8 +96,10 @@ public class TestProlepticConversions {
     return result;
   }
 
-  @Test
-  public void testReadWrite() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testReadWrite(
+      boolean writerProlepticGregorian, boolean readerProlepticGregorian) throws Exception {
     TypeDescription schema = TypeDescription.fromString(
         "struct<d:date,t:timestamp,i:timestamp with local time zone>");
     try (Writer writer = OrcFile.createWriter(testFilePath,
@@ -197,10 +183,13 @@ public class TestProlepticConversions {
       for(int r=0; r < batch.size; ++r) {
         String expectedD = String.format("%04d-01-23", r * 2 + 1);
         String expectedT = String.format("%04d-03-21 %02d:12:34", 2 * r + 1, r % 24);
-        assertEquals("row " + r, expectedD, readerChronology.dateEpochDay(d.vector[r])
-            .format(dateFormat));
-        assertEquals("row " + r, expectedT, timeFormat.format(t.asScratchTimestamp(r)));
-        assertEquals("row " + r, expectedT, timeFormat.format(i.asScratchTimestamp(r)));
+        assertEquals(expectedD,
+            readerChronology.dateEpochDay(d.vector[r]).format(dateFormat),
+            "row " + r);
+        assertEquals(expectedT, timeFormat.format(t.asScratchTimestamp(r)),
+            "row " + r);
+        assertEquals(expectedT, timeFormat.format(i.asScratchTimestamp(r)),
+            "row " + r);
       }
     }
   }
@@ -208,8 +197,10 @@ public class TestProlepticConversions {
   /**
    * Test all of the type conversions from/to date.
    */
-  @Test
-  public void testSchemaEvolutionDate() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testSchemaEvolutionDate(
+      boolean writerProlepticGregorian, boolean readerProlepticGregorian) throws Exception {
     TypeDescription schema = TypeDescription.fromString(
         "struct<d2s:date,d2t:date,s2d:string,t2d:timestamp>");
     try (Writer writer = OrcFile.createWriter(testFilePath,
@@ -271,12 +262,12 @@ public class TestProlepticConversions {
         String expectedD1 = String.format("%04d-01-23", 2 * r + 1);
         String expectedD2 = expectedD1 + " 00:00:00";
         String expectedT = String.format("%04d-03-21", 2 * r + 1);
-        assertEquals("row " + r, expectedD1, d2s.toString(r));
-        assertEquals("row " + r, expectedD2, timeFormat.format(d2t.asScratchTimestamp(r)));
-        assertEquals("row " + r, expectedD1, DateUtils.printDate((int) s2d.vector[r],
-            readerProlepticGregorian));
-        assertEquals("row " + r, expectedT, dateFormat.format(
-            new Date(TimeUnit.DAYS.toMillis(t2d.vector[r]))));
+        assertEquals(expectedD1, d2s.toString(r), "row " + r);
+        assertEquals(expectedD2, timeFormat.format(d2t.asScratchTimestamp(r)), "row " + r);
+        assertEquals(expectedD1, DateUtils.printDate((int) s2d.vector[r],
+            readerProlepticGregorian), "row " + r);
+        assertEquals(expectedT, dateFormat.format(
+            new Date(TimeUnit.DAYS.toMillis(t2d.vector[r]))), "row " + r);
       }
       assertEquals(false, rows.nextBatch(batch));
     }
@@ -286,8 +277,10 @@ public class TestProlepticConversions {
    * Test all of the type conversions from/to timestamp, except for date,
    * which was handled above.
    */
-  @Test
-  public void testSchemaEvolutionTimestamp() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testSchemaEvolutionTimestamp(
+      boolean writerProlepticGregorian, boolean readerProlepticGregorian) throws Exception {
     TypeDescription schema = TypeDescription.fromString(
         "struct<t2i:timestamp,t2d:timestamp,t2D:timestamp,t2s:timestamp,"
         + "i2t:bigint,d2t:decimal(18,2),D2t:double,s2t:string>");
@@ -365,18 +358,22 @@ public class TestProlepticConversions {
       for(int r=0; r < batch.size; ++r) {
         String time = String.format("%04d-03-21 %02d:12:34.12", 2 * r + 1, r % 24);
         long millis = DateUtils.parseTime(time, readerProlepticGregorian, true);
-        assertEquals("row " + r, time.substring(0, time.length() - 3),
-            DateUtils.printTime(i2t.time[r], readerProlepticGregorian, true));
-        assertEquals("row " + r, time,
-            DateUtils.printTime(d2t.time[r], readerProlepticGregorian, true));
-        assertEquals("row " + r, time,
-            DateUtils.printTime(D2t.time[r], readerProlepticGregorian, true));
-        assertEquals("row " + r, time,
-            DateUtils.printTime(s2t.time[r], readerProlepticGregorian, true));
-        assertEquals("row " + r, Math.floorDiv(millis, 1000), t2i.vector[r]);
-        assertEquals("row " + r, Math.floorDiv(millis, 10), t2d.vector[r]);
-        assertEquals("row " + r, millis/1000.0, t2D.vector[r], 0.1);
-        assertEquals("row " + r, time, t2s.toString(r));
+        assertEquals(time.substring(0, time.length() - 3),
+            DateUtils.printTime(i2t.time[r], readerProlepticGregorian, true),
+            "row " + r);
+        assertEquals(time,
+            DateUtils.printTime(d2t.time[r], readerProlepticGregorian, true),
+            "row " + r);
+        assertEquals(time,
+            DateUtils.printTime(D2t.time[r], readerProlepticGregorian, true),
+            "row " + r);
+        assertEquals(time,
+            DateUtils.printTime(s2t.time[r], readerProlepticGregorian, true),
+            "row " + r);
+        assertEquals(Math.floorDiv(millis, 1000), t2i.vector[r], "row " + r);
+        assertEquals(Math.floorDiv(millis, 10), t2d.vector[r], "row " + r);
+        assertEquals(millis/1000.0, t2D.vector[r], 0.1, "row " + r);
+        assertEquals(time, t2s.toString(r), "row " + r);
       }
       assertEquals(false, rows.nextBatch(batch));
     }
diff --git a/java/core/src/test/org/apache/orc/TestReader.java b/java/core/src/test/org/apache/orc/TestReader.java
index 26d71ef..4c18957 100644
--- a/java/core/src/test/org/apache/orc/TestReader.java
+++ b/java/core/src/test/org/apache/orc/TestReader.java
@@ -17,7 +17,8 @@
  */
 package org.apache.orc;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 import java.io.File;
 
@@ -25,10 +26,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
 
 public class TestReader {
   Path workDir = new Path(System.getProperty("test.tmp.dir",
@@ -37,15 +34,12 @@ public class TestReader {
   FileSystem fs;
   Path testFilePath;
 
-  @Rule
-  public TestName testCaseName = new TestName();
-
-  @Before
-  public void openFileSystem() throws Exception {
+  @BeforeEach
+  public void openFileSystem(TestInfo testInfo) throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, TestReader.class.getSimpleName() + "." +
-        testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     fs.delete(testFilePath, false);
   }
 
@@ -59,24 +53,28 @@ public class TestReader {
     assertEquals(0, reader.getNumberOfRows());
   }
 
-  @Test(expected=FileFormatException.class)
+  @Test
   public void testReadFileLengthLessThanMagic() throws Exception {
-    FSDataOutputStream fout = fs.create(testFilePath);
-    fout.writeBoolean(true);
-    fout.close();
-    assertEquals(1, fs.getFileStatus(testFilePath).getLen());
-    OrcFile.createReader(testFilePath,
-      OrcFile.readerOptions(conf).filesystem(fs));
+    assertThrows(FileFormatException.class, () -> {
+      FSDataOutputStream fout = fs.create(testFilePath);
+      fout.writeBoolean(true);
+      fout.close();
+      assertEquals(1, fs.getFileStatus(testFilePath).getLen());
+      OrcFile.createReader(testFilePath,
+          OrcFile.readerOptions(conf).filesystem(fs));
+    });
   }
 
-  @Test(expected=FileFormatException.class)
+  @Test
   public void testReadFileInvalidHeader() throws Exception {
-    FSDataOutputStream fout = fs.create(testFilePath);
-    fout.writeLong(1);
-    fout.close();
-    assertEquals(8, fs.getFileStatus(testFilePath).getLen());
-    OrcFile.createReader(testFilePath,
-      OrcFile.readerOptions(conf).filesystem(fs));
+    assertThrows(FileFormatException.class, () -> {
+      FSDataOutputStream fout = fs.create(testFilePath);
+      fout.writeLong(1);
+      fout.close();
+      assertEquals(8, fs.getFileStatus(testFilePath).getLen());
+      OrcFile.createReader(testFilePath,
+          OrcFile.readerOptions(conf).filesystem(fs));
+    });
   }
 
   @Test
diff --git a/java/core/src/test/org/apache/orc/TestRowFilteringComplexTypes.java b/java/core/src/test/org/apache/orc/TestRowFilteringComplexTypes.java
index 2bccc17..0bcdb17 100644
--- a/java/core/src/test/org/apache/orc/TestRowFilteringComplexTypes.java
+++ b/java/core/src/test/org/apache/orc/TestRowFilteringComplexTypes.java
@@ -30,11 +30,9 @@ import org.apache.hadoop.hive.ql.exec.vector.UnionColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.orc.impl.RecordReaderImpl;
-import static org.junit.Assert.*;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
+
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 import java.io.File;
 
@@ -48,14 +46,12 @@ public class TestRowFilteringComplexTypes {
 
     private static final int ColumnBatchRows = 1024;
 
-    @Rule
-    public TestName testCaseName = new TestName();
-
-    @Before
-    public void openFileSystem() throws Exception {
+    @BeforeEach
+    public void openFileSystem(TestInfo testInfo) throws Exception {
         conf = new Configuration();
         fs = FileSystem.getLocal(conf);
-        testFilePath = new Path(workDir, "TestRowFilteringComplexTypes." + testCaseName.getMethodName() + ".orc");
+        testFilePath = new Path(workDir,
+            "TestRowFilteringComplexTypes." + testInfo.getTestMethod().get().getName() + ".orc");
         fs.delete(testFilePath, false);
     }
 
@@ -177,14 +173,14 @@ public class TestRowFilteringComplexTypes {
                 for (int r = 0; r < batch.size; ++r) {
                     int row = batch.selected[r];
                     int originalRow = (r + previousBatchRows) * 2;
-                    assertEquals("row " + originalRow, originalRow, col1.vector[row]);
-                    assertEquals("row " + originalRow, 0, col2.tags[row]);
-                    assertEquals("row " + originalRow,
-                        originalRow * 1000, innerCol1.vector[row]);
+                    String msg = "row " + originalRow;
+                    assertEquals(originalRow, col1.vector[row], msg);
+                    assertEquals(0, col2.tags[row], msg);
+                    assertEquals(originalRow * 1000, innerCol1.vector[row], msg);
                 }
                 // check to make sure that we didn't read innerCol2
                 for(int r = 1; r < ColumnBatchRows; r += 2) {
-                    assertEquals("row " + r, 0, innerCol2.vector[r]);
+                    assertEquals(0, innerCol2.vector[r], "row " + r);
                 }
                 previousBatchRows += batch.size;
             }
diff --git a/java/core/src/test/org/apache/orc/TestRowFilteringComplexTypesNulls.java b/java/core/src/test/org/apache/orc/TestRowFilteringComplexTypesNulls.java
index b5d4b39..0785410 100644
--- a/java/core/src/test/org/apache/orc/TestRowFilteringComplexTypesNulls.java
+++ b/java/core/src/test/org/apache/orc/TestRowFilteringComplexTypesNulls.java
@@ -31,8 +31,6 @@ import org.apache.hadoop.hive.ql.exec.vector.UnionColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.orc.impl.OrcFilterContextImpl;
-import org.junit.BeforeClass;
-import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -44,8 +42,8 @@ import java.util.Random;
 import java.util.Set;
 import java.util.function.Consumer;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class TestRowFilteringComplexTypesNulls {
   private static final Logger LOG =
@@ -73,7 +71,7 @@ public class TestRowFilteringComplexTypesNulls {
   private static final String[] FilterColumns = new String[] {"ridx", "s2.f2", "u3.0"};
   private static final int scale = 3;
 
-  @BeforeClass
+  @BeforeAll
   public static void setup() throws IOException {
     conf = new Configuration();
     fs = FileSystem.get(conf);
@@ -193,9 +191,8 @@ public class TestRowFilteringComplexTypesNulls {
     assertEquals(0, rowCount);
     // We should read less than half the length of the file
     double readPercentage = readPercentage(stats, fs.getFileStatus(filePath).getLen());
-    assertTrue(String.format("Bytes read %.2f%% should be less than 50%%",
-                             readPercentage),
-               readPercentage < 50);
+    assertTrue(readPercentage < 50,
+        String.format("Bytes read %.2f%% should be less than 50%%", readPercentage));
   }
 
   private long validateFilteredRecordReader(RecordReader rr, VectorizedRowBatch b)
@@ -328,8 +325,8 @@ public class TestRowFilteringComplexTypesNulls {
       // stripe change
       bytesRead = readEnd().getBytesRead();
       seekToRow(rr, b, 1024);
-      assertTrue("Change of stripe should require more IO",
-                 readEnd().getBytesRead() > bytesRead);
+      assertTrue(readEnd().getBytesRead() > bytesRead,
+          "Change of stripe should require more IO");
     }
     FileSystem.Statistics stats = readEnd();
     double readPercentage = readPercentage(stats, fs.getFileStatus(filePath).getLen());
diff --git a/java/core/src/test/org/apache/orc/TestRowFilteringIOSkip.java b/java/core/src/test/org/apache/orc/TestRowFilteringIOSkip.java
index 38d0ba2..b310ee8 100644
--- a/java/core/src/test/org/apache/orc/TestRowFilteringIOSkip.java
+++ b/java/core/src/test/org/apache/orc/TestRowFilteringIOSkip.java
@@ -30,9 +30,6 @@ import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.orc.impl.OrcFilterContextImpl;
-import static org.junit.Assert.*;
-import org.junit.BeforeClass;
-import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -44,6 +41,9 @@ import java.util.Random;
 import java.util.Set;
 import java.util.function.Consumer;
 
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
+
 public class TestRowFilteringIOSkip {
   private static final Logger LOG = LoggerFactory.getLogger(TestRowFilteringIOSkip.class);
   private static final Path workDir = new Path(System.getProperty("test.tmp.dir",
@@ -65,7 +65,7 @@ public class TestRowFilteringIOSkip {
   private static final String[] FilterColumns = new String[] {"f1", "ridx"};
   private static final int scale = 3;
 
-  @BeforeClass
+  @BeforeAll
   public static void setup() throws IOException {
     conf = new Configuration();
     fs = FileSystem.get(conf);
@@ -139,10 +139,10 @@ public class TestRowFilteringIOSkip {
     FileSystem.Statistics stats = readEnd();
     assertEquals(RowCount, rowCount);
     // We should read less than half the length of the file
-    assertTrue(String.format("Bytes read %d is not half of file size %d",
-                                    stats.getBytesRead(),
-                                    r.getContentLength()),
-                      stats.getBytesRead() < r.getContentLength() / 2);
+    assertTrue(stats.getBytesRead() < r.getContentLength() / 2,
+        String.format("Bytes read %d is not half of file size %d",
+            stats.getBytesRead(),
+            r.getContentLength()));
   }
 
   @Test
@@ -216,9 +216,8 @@ public class TestRowFilteringIOSkip {
     assertEquals(0, rowCount);
     // We should read less than half the length of the file
     double readPercentage = readPercentage(stats, fs.getFileStatus(filePath).getLen());
-    assertTrue(String.format("Bytes read %.2f%% should be less than 50%%",
-                                    readPercentage),
-                      readPercentage < 50);
+    assertTrue(readPercentage < 50,
+        String.format("Bytes read %.2f%% should be less than 50%%", readPercentage));
   }
 
   @Test
@@ -316,8 +315,8 @@ public class TestRowFilteringIOSkip {
       // stripe change
       bytesRead = readEnd().getBytesRead();
       seekToRow(rr, b, 1024);
-      assertTrue("Change of stripe should require more IO",
-                        readEnd().getBytesRead() > bytesRead);
+      assertTrue(readEnd().getBytesRead() > bytesRead,
+          "Change of stripe should require more IO");
     }
     FileSystem.Statistics stats = readEnd();
     double readPercentage = readPercentage(stats, fs.getFileStatus(filePath).getLen());
diff --git a/java/core/src/test/org/apache/orc/TestRowFilteringNoSkip.java b/java/core/src/test/org/apache/orc/TestRowFilteringNoSkip.java
index 79a7db7..a845fdc 100644
--- a/java/core/src/test/org/apache/orc/TestRowFilteringNoSkip.java
+++ b/java/core/src/test/org/apache/orc/TestRowFilteringNoSkip.java
@@ -24,15 +24,12 @@ import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.orc.impl.RecordReaderImpl;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
 
 import java.io.File;
 import java.sql.Timestamp;
 
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  * Types that are not skipped at row-level include: Long, Short, Int, Date, Binary
@@ -50,14 +47,12 @@ public class TestRowFilteringNoSkip {
 
   private static final int ColumnBatchRows = 1024;
 
-  @Rule
-  public TestName testCaseName = new TestName();
-
-  @Before
-  public void openFileSystem() throws Exception {
+  @BeforeEach
+  public void openFileSystem(TestInfo testInfo) throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
-    testFilePath = new Path(workDir, "TestRowFilteringNoSkip." + testCaseName.getMethodName() + ".orc");
+    testFilePath = new Path(workDir, "TestRowFilteringNoSkip." +
+        testInfo.getTestMethod().get().getName() + ".orc");
     fs.delete(testFilePath, false);
   }
 
diff --git a/java/core/src/test/org/apache/orc/TestRowFilteringSkip.java b/java/core/src/test/org/apache/orc/TestRowFilteringSkip.java
index 4d19146c..127402c 100644
--- a/java/core/src/test/org/apache/orc/TestRowFilteringSkip.java
+++ b/java/core/src/test/org/apache/orc/TestRowFilteringSkip.java
@@ -32,10 +32,6 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.orc.impl.OrcFilterContextImpl;
 import org.apache.orc.impl.RecordReaderImpl;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
 
 import java.io.File;
 import java.nio.charset.StandardCharsets;
@@ -44,7 +40,8 @@ import java.sql.Timestamp;
 import java.text.Format;
 import java.text.SimpleDateFormat;
 
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  * Types that are skipped at row-level include: Decimal, Decimal64, Double, Float, Char, VarChar, String, Boolean, Timestamp
@@ -61,14 +58,12 @@ public class TestRowFilteringSkip {
 
   private static final int ColumnBatchRows = 1024;
 
-  @Rule
-  public TestName testCaseName = new TestName();
-
-  @Before
-  public void openFileSystem() throws Exception {
+  @BeforeEach
+  public void openFileSystem(TestInfo testInfo) throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
-    testFilePath = new Path(workDir, "TestRowFilteringSkip." + testCaseName.getMethodName() + ".orc");
+    testFilePath = new Path(workDir, "TestRowFilteringSkip." +
+        testInfo.getTestMethod().get().getName() + ".orc");
     fs.delete(testFilePath, false);
   }
 
diff --git a/java/core/src/test/org/apache/orc/TestStringDictionary.java b/java/core/src/test/org/apache/orc/TestStringDictionary.java
index 08e9adc..34f7bd5 100644
--- a/java/core/src/test/org/apache/orc/TestStringDictionary.java
+++ b/java/core/src/test/org/apache/orc/TestStringDictionary.java
@@ -17,16 +17,17 @@
  */
 package org.apache.orc;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.*;
 
 import java.io.File;
 import java.nio.charset.StandardCharsets;
-import java.util.Arrays;
-import java.util.Collection;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
+import java.util.stream.Stream;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -46,15 +47,8 @@ import org.apache.orc.impl.writer.StringTreeWriter;
 import org.apache.orc.impl.writer.TreeWriter;
 import org.apache.orc.impl.writer.WriterContext;
 import org.apache.orc.impl.writer.WriterEncryptionVariant;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
 
 
-@RunWith(Parameterized.class)
 public class TestStringDictionary {
 
   private Path workDir = new Path(System.getProperty("test.tmp.dir", "target" + File.separator + "test"
@@ -63,34 +57,24 @@ public class TestStringDictionary {
   private Configuration conf;
   private FileSystem fs;
   private Path testFilePath;
-  private String dictImplString;
 
-  @Rule
-  public TestName testCaseName = new TestName();
-
-  @Before
-  public void openFileSystem() throws Exception {
+  @BeforeEach
+  public void openFileSystem(TestInfo testInfo) throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
-    testFilePath = new Path(workDir, "TestStringDictionary." + testCaseName.getMethodName() + ".orc");
+    testFilePath = new Path(workDir, "TestStringDictionary." +
+        testInfo.getTestMethod().get().getName() + ".orc");
     fs.delete(testFilePath, false);
-    OrcConf.DICTIONARY_IMPL.setString(conf, dictImplString);
-  }
-
-  @Parameterized.Parameters
-  public static Collection params() {
-    return Arrays.asList(new Object[][] {
-        { "RBTREE" },
-        { "HASH" }
-    });
   }
 
-  public TestStringDictionary(String dictImpl) {
-    this.dictImplString = dictImpl;
+  private static Stream<Arguments> data() {
+    return Stream.of(Arguments.of("RBTREE"), Arguments.of("HASH"));
   }
 
-  @Test
-  public void testTooManyDistinct() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testTooManyDistinct(String dictImpl) throws Exception {
+    OrcConf.DICTIONARY_IMPL.setString(conf, dictImpl);
     TypeDescription schema = TypeDescription.createString();
 
     Writer writer = OrcFile.createWriter(
@@ -133,8 +117,10 @@ public class TestStringDictionary {
     }
   }
 
-  @Test
-  public void testHalfDistinct() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testHalfDistinct(String dictImpl) throws Exception {
+    OrcConf.DICTIONARY_IMPL.setString(conf, dictImpl);
     final int totalSize = 20000;
     final int bound = 10000;
 
@@ -300,8 +286,10 @@ public class TestStringDictionary {
     }
   }
 
-  @Test
-  public void testNonDistinctDisabled() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testNonDistinctDisabled(String dictImpl) throws Exception {
+    OrcConf.DICTIONARY_IMPL.setString(conf, dictImpl);
     TypeDescription schema = TypeDescription.createString();
 
     conf.set(OrcConf.DICTIONARY_KEY_SIZE_THRESHOLD.getAttribute(), "0.0");
@@ -322,8 +310,10 @@ public class TestStringDictionary {
     assertEquals(6000, output.buffer.size());
   }
 
-  @Test
-  public void testTooManyDistinctCheckDisabled() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testTooManyDistinctCheckDisabled(String dictImpl) throws Exception {
+    OrcConf.DICTIONARY_IMPL.setString(conf, dictImpl);
     TypeDescription schema = TypeDescription.createString();
 
     conf.setBoolean(OrcConf.ROW_INDEX_STRIDE_DICTIONARY_CHECK.getAttribute(), false);
@@ -366,8 +356,10 @@ public class TestStringDictionary {
     }
   }
 
-  @Test
-  public void testHalfDistinctCheckDisabled() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testHalfDistinctCheckDisabled(String dictImpl) throws Exception {
+    OrcConf.DICTIONARY_IMPL.setString(conf, dictImpl);
     TypeDescription schema = TypeDescription.createString();
 
     conf.setBoolean(OrcConf.ROW_INDEX_STRIDE_DICTIONARY_CHECK.getAttribute(),
@@ -417,8 +409,10 @@ public class TestStringDictionary {
     }
   }
 
-  @Test
-  public void testTooManyDistinctV11AlwaysDictionary() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testTooManyDistinctV11AlwaysDictionary(String dictImpl) throws Exception {
+    OrcConf.DICTIONARY_IMPL.setString(conf, dictImpl);
     TypeDescription schema = TypeDescription.createString();
 
     Writer writer = OrcFile.createWriter(
@@ -467,8 +461,10 @@ public class TestStringDictionary {
    * `longString` column (presumably for a low hit-ratio), while preserving DICTIONARY_V2 for `shortString`.
    * @throws Exception on unexpected failure
    */
-  @Test
-  public void testDisableDictionaryForSpecificColumn() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testDisableDictionaryForSpecificColumn(String dictImpl) throws Exception {
+    OrcConf.DICTIONARY_IMPL.setString(conf, dictImpl);
     final String SHORT_STRING_VALUE = "foo";
     final String  LONG_STRING_VALUE = "BAAAAAAAAR!!";
 
@@ -516,27 +512,28 @@ public class TestStringDictionary {
       // within the same package as ORC reader
       OrcProto.StripeFooter footer = ((RecordReaderImpl) recordReader).readStripeFooter(stripe);
       for (int i = 0; i < footer.getColumnsCount(); ++i) {
+        assertEquals(3, footer.getColumnsCount(),
+            "Expected 3 columns in the footer: One for the Orc Struct, and two for its members.");
         assertEquals(
-            "Expected 3 columns in the footer: One for the Orc Struct, and two for its members.",
-            3, footer.getColumnsCount());
-        assertEquals(
-            "The ORC schema struct should be DIRECT encoded.",
-            OrcProto.ColumnEncoding.Kind.DIRECT, footer.getColumns(0).getKind()
+            OrcProto.ColumnEncoding.Kind.DIRECT, footer.getColumns(0).getKind(),
+            "The ORC schema struct should be DIRECT encoded."
         );
         assertEquals(
-            "The shortString column must be DICTIONARY_V2 encoded",
-            OrcProto.ColumnEncoding.Kind.DICTIONARY_V2, footer.getColumns(1).getKind()
+            OrcProto.ColumnEncoding.Kind.DICTIONARY_V2, footer.getColumns(1).getKind(),
+            "The shortString column must be DICTIONARY_V2 encoded"
         );
         assertEquals(
-            "The longString column must be DIRECT_V2 encoded",
-            OrcProto.ColumnEncoding.Kind.DIRECT_V2, footer.getColumns(2).getKind()
+            OrcProto.ColumnEncoding.Kind.DIRECT_V2, footer.getColumns(2).getKind(),
+            "The longString column must be DIRECT_V2 encoded"
         );
       }
     }
   }
 
-  @Test
-  public void testForcedNonDictionary() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testForcedNonDictionary(String dictImpl) throws Exception {
+    OrcConf.DICTIONARY_IMPL.setString(conf, dictImpl);
     // Set the row stride to 16k so that it is a multiple of the batch size
     final int INDEX_STRIDE = 16 * 1024;
     final int NUM_BATCHES = 50;
@@ -576,14 +573,14 @@ public class TestStringDictionary {
         OrcProto.RowIndexEntry entry = index.getEntry(e);
         // For a string column with direct encoding, compression & no nulls, we
         // should have 5 positions in each entry.
-        assertEquals("position count entry " + e, 5, entry.getPositionsCount());
+        assertEquals(5, entry.getPositionsCount(), "position count entry " + e);
         // make sure we can seek and get the right data
         int row = e * INDEX_STRIDE;
         rows.seekToRow(row);
-        assertTrue("entry " + e, rows.nextBatch(batch));
-        assertEquals("entry " + e, 1024, batch.size);
-        assertEquals("entry " + e, true, col.noNulls);
-        assertEquals("entry " + e, "Value for " + (row / 1024), col.toString(0));
+        assertTrue(rows.nextBatch(batch), "entry " + e);
+        assertEquals(1024, batch.size, "entry " + e);
+        assertEquals(true, col.noNulls, "entry " + e);
+        assertEquals("Value for " + (row / 1024), col.toString(0), "entry " + e);
       }
     }
   }
@@ -591,8 +588,10 @@ public class TestStringDictionary {
   /**
    * That when we disable dictionaries, we don't get broken row indexes.
    */
-  @Test
-  public void testRowIndex() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testRowIndex(String dictImpl) throws Exception {
+    OrcConf.DICTIONARY_IMPL.setString(conf, dictImpl);
     TypeDescription schema =
         TypeDescription.fromString("struct<str:string>");
     // turn off the dictionaries
@@ -627,7 +626,7 @@ public class TestStringDictionary {
     while (recordReader.nextBatch(batch)) {
       for(int r=0; r < batch.size; ++r) {
         String value = String.format("row %06d", r + base);
-        assertEquals("row " + (r + base), value, strVector.toString(r));
+        assertEquals(value, strVector.toString(r), "row " + (r + base));
       }
       base += batch.size;
     }
@@ -638,8 +637,10 @@ public class TestStringDictionary {
   /**
    * Test that files written before ORC-569 are read correctly.
    */
-  @Test
-  public void testRowIndexPreORC569() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testRowIndexPreORC569(String dictImpl) throws Exception {
+    OrcConf.DICTIONARY_IMPL.setString(conf, dictImpl);
     testFilePath = new Path(System.getProperty("example.dir"), "TestStringDictionary.testRowIndex.orc");
     SearchArgument sarg = SearchArgumentFactory.newBuilder(conf)
         .lessThan("str", PredicateLeaf.Type.STRING, "row 001000")
@@ -652,7 +653,7 @@ public class TestStringDictionary {
         while (recordReader.nextBatch(batch)) {
           for (int r = 0; r < batch.size; ++r) {
             String value = String.format("row %06d", r + base);
-            assertEquals("row " + (r + base), value, strVector.toString(r));
+            assertEquals(value, strVector.toString(r), "row " + (r + base));
           }
           base += batch.size;
         }
diff --git a/java/core/src/test/org/apache/orc/TestTypeDescription.java b/java/core/src/test/org/apache/orc/TestTypeDescription.java
index 9853954..e089794 100644
--- a/java/core/src/test/org/apache/orc/TestTypeDescription.java
+++ b/java/core/src/test/org/apache/orc/TestTypeDescription.java
@@ -17,11 +17,8 @@
  */
 package org.apache.orc;
 
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 import java.io.File;
 import java.io.IOException;
@@ -29,14 +26,8 @@ import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
 
 public class TestTypeDescription {
-  @Rule
-  public ExpectedException thrown= ExpectedException.none();
-
   @Test
   public void testJson() {
     TypeDescription bin = TypeDescription.createBinary();
@@ -163,58 +154,66 @@ public class TestTypeDescription {
 
   @Test
   public void testMissingField() {
-    thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Missing name at 'struct<^'");
-    TypeDescription.fromString("struct<");
+    IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> {
+      TypeDescription.fromString("struct<");
+    });
+    assertTrue(e.getMessage().contains("Missing name at 'struct<^'"));
   }
 
   @Test
   public void testQuotedField1() {
-    thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Unmatched quote at 'struct<^`abc'");
-    TypeDescription.fromString("struct<`abc");
+    IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> {
+      TypeDescription.fromString("struct<`abc");
+    });
+    assertTrue(e.getMessage().contains("Unmatched quote at 'struct<^`abc'"));
   }
 
   @Test
   public void testQuotedField2() {
-    thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Empty quoted field name at 'struct<``^:int>'");
-    TypeDescription.fromString("struct<``:int>");
+    IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> {
+      TypeDescription.fromString("struct<``:int>");
+    });
+    assertTrue(e.getMessage().contains("Empty quoted field name at 'struct<``^:int>'"));
   }
 
   @Test
   public void testParserUnknownCategory() {
-    thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Can't parse category at 'FOOBAR^'");
-    TypeDescription.fromString("FOOBAR");
+    IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> {
+      TypeDescription.fromString("FOOBAR");
+    });
+    assertTrue(e.getMessage().contains("Can't parse category at 'FOOBAR^'"));
   }
 
   @Test
   public void testParserEmptyCategory() {
-    thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Can't parse category at '^<int>'");
-    TypeDescription.fromString("<int>");
+    IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> {
+      TypeDescription.fromString("<int>");
+    });
+    assertTrue(e.getMessage().contains("Can't parse category at '^<int>'"));
   }
 
   @Test
   public void testParserMissingInt() {
-    thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Missing integer at 'char(^)'");
-    TypeDescription.fromString("char()");
+    IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> {
+      TypeDescription.fromString("char()");
+    });
+    assertTrue(e.getMessage().contains("Missing integer at 'char(^)'"));
   }
 
   @Test
   public void testParserMissingSize() {
-    thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Missing required char '(' at 'struct<c:char^>'");
-    TypeDescription.fromString("struct<c:char>");
+    IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> {
+      TypeDescription.fromString("struct<c:char>");
+    });
+    assertTrue(e.getMessage().contains("Missing required char '(' at 'struct<c:char^>'"));
   }
 
   @Test
   public void testParserExtraStuff() {
-    thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Extra characters at 'struct<i:int>^,'");
-    TypeDescription.fromString("struct<i:int>,");
+    IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> {
+      TypeDescription.fromString("struct<i:int>,");
+    });
+    assertTrue(e.getMessage().contains("Extra characters at 'struct<i:int>^,'"));
   }
 
   @Test
@@ -462,7 +461,7 @@ public class TestTypeDescription {
     assertEquals(3, clearAttributes(schema));
   }
 
-  @Test(expected = IllegalArgumentException.class)
+  @Test
   public void testEncryptionConflict() {
     TypeDescription schema = TypeDescription.fromString(
         "struct<" +
@@ -470,10 +469,11 @@ public class TestTypeDescription {
             "address:struct<street:string,city:string,country:string,post_code:string>," +
             "credit_cards:array<struct<card_number:string,expire:date,ccv:string>>>");
     // set some encryption
-    schema.annotateEncryption("pii:address,personal:address",null);
+    assertThrows(IllegalArgumentException.class, () ->
+        schema.annotateEncryption("pii:address,personal:address",null));
   }
 
-  @Test(expected = IllegalArgumentException.class)
+  @Test
   public void testMaskConflict() {
     TypeDescription schema = TypeDescription.fromString(
         "struct<" +
@@ -481,6 +481,7 @@ public class TestTypeDescription {
             "address:struct<street:string,city:string,country:string,post_code:string>," +
             "credit_cards:array<struct<card_number:string,expire:date,ccv:string>>>");
     // set some encryption
-    schema.annotateEncryption(null,"nullify:name;sha256:name");
+    assertThrows(IllegalArgumentException.class, () ->
+        schema.annotateEncryption(null,"nullify:name;sha256:name"));
   }
 }
diff --git a/java/core/src/test/org/apache/orc/TestUnicode.java b/java/core/src/test/org/apache/orc/TestUnicode.java
index c2462e6..2cfcc10 100644
--- a/java/core/src/test/org/apache/orc/TestUnicode.java
+++ b/java/core/src/test/org/apache/orc/TestUnicode.java
@@ -17,12 +17,15 @@
  */
 package org.apache.orc;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.*;
 
 import java.io.File;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
-import java.util.Collection;
+import java.util.stream.Stream;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -30,15 +33,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
-
-@RunWith(Parameterized.class)
+
 public class TestUnicode {
   Path workDir = new Path(System.getProperty("test.tmp.dir", "target" + File.separator + "test"
       + File.separator + "tmp"));
@@ -47,26 +42,14 @@ public class TestUnicode {
   FileSystem fs;
   Path testFilePath;
 
-  private final String type;
-  private final int maxLength;
-  private final boolean hasRTrim;
-
-  @Parameters
-  public static Collection<Object[]> data() {
-    ArrayList<Object[]> data = new ArrayList<>();
+  private static Stream<Arguments> data() {
+    ArrayList<Arguments> data = new ArrayList<>();
     for (int j = 0; j < 2; j++) {
       for (int i = 1; i <= 5; i++) {
-        data.add(new Object[] { j == 0 ? "char" : "varchar", i, true });
+        data.add(Arguments.of(j == 0 ? "char" : "varchar", i, true));
       }
     }
-    //data.add(new Object[] {"char", 3});
-    return data;
-  }
-
-  public TestUnicode(String type, int maxLength, boolean hasRTrim) {
-    this.type = type;
-    this.maxLength = maxLength;
-    this.hasRTrim = hasRTrim;
+    return data.stream();
   }
 
   static final String[] utf8strs = new String[] {
@@ -79,23 +62,22 @@ public class TestUnicode {
       "\u270f\ufe0f\ud83d\udcdd\u270f\ufe0f", "\ud83c\udf3b\ud83d\udc1d\ud83c\udf6f",
       "\ud83c\udf7a\ud83e\udd43\ud83c\udf77" };
 
-  @Rule
-  public TestName testCaseName = new TestName();
-
-  @Before
-  public void openFileSystem() throws Exception {
+  @BeforeEach
+  public void openFileSystem(TestInfo testInfo) throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
-    testFilePath = new Path(workDir, "TestOrcFile." + testCaseName.getMethodName() + ".orc");
+    testFilePath = new Path(workDir, "TestOrcFile." +
+        testInfo.getTestMethod().get().getName() + ".orc");
     fs.delete(testFilePath, false);
   }
 
-  @Test
-  public void testUtf8() throws Exception {
-    if (type == "varchar") {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testUtf8(String type, int maxLength, boolean hasRTrim) throws Exception {
+    if (type.equals("varchar")) {
       testVarChar(maxLength);
     } else {
-      testChar(maxLength);
+      testChar(maxLength, hasRTrim);
     }
   }
 
@@ -140,14 +122,14 @@ public class TestUnicode {
     return val;
   }
 
-  public void testChar(int maxLength) throws Exception {
+  public void testChar(int maxLength, boolean hasRTrim) throws Exception {
     // char(n)
     TypeDescription schema = TypeDescription.createChar().withMaxLength(maxLength);
     String[] expected = new String[utf8strs.length];
     for (int i = 0; i < utf8strs.length; i++) {
       expected[i] = getPaddedValue(utf8strs[i], maxLength, hasRTrim);
     }
-    verifyWrittenStrings(schema, utf8strs, expected);
+    verifyWrittenStrings(schema, utf8strs, expected, maxLength);
   }
 
   public void testVarChar(int maxLength) throws Exception {
@@ -157,10 +139,10 @@ public class TestUnicode {
     for (int i = 0; i < utf8strs.length; i++) {
       expected[i] = enforceMaxLength(utf8strs[i], maxLength);
     }
-    verifyWrittenStrings(schema, utf8strs, expected);
+    verifyWrittenStrings(schema, utf8strs, expected, maxLength);
   }
 
-  public void verifyWrittenStrings(TypeDescription schema, String[] inputs, String[] expected)
+  public void verifyWrittenStrings(TypeDescription schema, String[] inputs, String[] expected, int maxLength)
       throws Exception {
     Writer writer =
         OrcFile.createWriter(testFilePath, OrcFile.writerOptions(conf).setSchema(schema)
@@ -185,8 +167,8 @@ public class TestUnicode {
     int idx = 0;
     while (rows.nextBatch(batch)) {
       for (int r = 0; r < batch.size; ++r) {
-        assertEquals(String.format("test for %s:%d", schema, maxLength), expected[idx],
-            toString(col, r));
+        assertEquals(expected[idx], toString(col, r),
+            String.format("test for %s:%d", schema, maxLength));
         idx++;
       }
     }
diff --git a/java/core/src/test/org/apache/orc/TestUnrolledBitPack.java b/java/core/src/test/org/apache/orc/TestUnrolledBitPack.java
index ef8fcd0..db860e3 100644
--- a/java/core/src/test/org/apache/orc/TestUnrolledBitPack.java
+++ b/java/core/src/test/org/apache/orc/TestUnrolledBitPack.java
@@ -18,44 +18,39 @@
 
 package org.apache.orc;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.*;
 
 import java.io.File;
-import java.util.Arrays;
-import java.util.Collection;
 import java.util.List;
+import java.util.stream.Stream;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
 
 import com.google.common.collect.Lists;
 import com.google.common.primitives.Longs;
 
-@RunWith(value = Parameterized.class)
 public class TestUnrolledBitPack {
 
-  private long val;
-
-  public TestUnrolledBitPack(long val) {
-    this.val = val;
-  }
-
-  @Parameters
-  public static Collection<Object[]> data() {
-    Object[][] data = new Object[][] { { -1 }, { 1 }, { 7 }, { -128 }, { 32000 }, { 8300000 },
-        { Integer.MAX_VALUE }, { 540000000000L }, { 140000000000000L }, { 36000000000000000L },
-        { Long.MAX_VALUE } };
-    return Arrays.asList(data);
+  private static Stream<Arguments> data() {
+    return Stream.of(
+        Arguments.of(-1),
+        Arguments.of(1),
+        Arguments.of(7),
+        Arguments.of(-128),
+        Arguments.of(32000),
+        Arguments.of(8300000),
+        Arguments.of(Integer.MAX_VALUE),
+        Arguments.of(540000000000L),
+        Arguments.of(140000000000000L),
+        Arguments.of(36000000000000000L),
+        Arguments.of(Long.MAX_VALUE));
   }
 
   Path workDir = new Path(System.getProperty("test.tmp.dir", "target" + File.separator + "test"
@@ -65,19 +60,18 @@ public class TestUnrolledBitPack {
   FileSystem fs;
   Path testFilePath;
 
-  @Rule
-  public TestName testCaseName = new TestName();
-
-  @Before
-  public void openFileSystem() throws Exception {
+  @BeforeEach
+  public void openFileSystem(TestInfo testInfo) throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
-    testFilePath = new Path(workDir, "TestOrcFile." + testCaseName.getMethodName() + ".orc");
+    testFilePath = new Path(workDir, "TestOrcFile." +
+        testInfo.getTestMethod().get().getName() + ".orc");
     fs.delete(testFilePath, false);
   }
 
-  @Test
-  public void testBitPacking() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testBitPacking(long val) throws Exception {
     TypeDescription schema = TypeDescription.createLong();
 
     long[] inp = new long[] { val, 0, val, val, 0, val, 0, val, val, 0, val, 0, val, val, 0, 0,
diff --git a/java/core/src/test/org/apache/orc/TestVectorOrcFile.java b/java/core/src/test/org/apache/orc/TestVectorOrcFile.java
index 31be42f..4f44382 100644
--- a/java/core/src/test/org/apache/orc/TestVectorOrcFile.java
+++ b/java/core/src/test/org/apache/orc/TestVectorOrcFile.java
@@ -33,6 +33,7 @@ import com.google.common.collect.Lists;
 import org.apache.orc.impl.ReaderImpl;
 import org.apache.orc.impl.reader.ReaderEncryption;
 import org.apache.orc.impl.reader.StripePlanner;
+import org.apache.orc.OrcFile.Version;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -62,10 +63,8 @@ import org.apache.orc.impl.RecordReaderUtils;
 import org.junit.jupiter.api.*;
 import static org.junit.jupiter.api.Assertions.*;
 import static org.junit.jupiter.api.Assumptions.*;
-
 import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.Arguments;
-import org.junit.jupiter.params.provider.MethodSource;
+import org.junit.jupiter.params.provider.*;
 import org.mockito.Mockito;
 
 import javax.xml.bind.DatatypeConverter;
diff --git a/java/core/src/test/org/apache/orc/impl/TestBitFieldReader.java b/java/core/src/test/org/apache/orc/impl/TestBitFieldReader.java
index 6f54c77..37cfded 100644
--- a/java/core/src/test/org/apache/orc/impl/TestBitFieldReader.java
+++ b/java/core/src/test/org/apache/orc/impl/TestBitFieldReader.java
@@ -17,13 +17,13 @@
  */
 package org.apache.orc.impl;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
 import java.nio.ByteBuffer;
 
 import org.apache.orc.CompressionCodec;
 import org.apache.orc.impl.writer.StreamOptions;
-import org.junit.Test;
 
 public class TestBitFieldReader {
 
diff --git a/java/core/src/test/org/apache/orc/impl/TestBitPack.java b/java/core/src/test/org/apache/orc/impl/TestBitPack.java
index a26b130..bce8424 100644
--- a/java/core/src/test/org/apache/orc/impl/TestBitPack.java
+++ b/java/core/src/test/org/apache/orc/impl/TestBitPack.java
@@ -17,8 +17,8 @@
  */
 package org.apache.orc.impl;
 
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 import java.io.File;
 import java.io.IOException;
@@ -30,10 +30,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.orc.impl.writer.StreamOptions;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
 
 import com.google.common.primitives.Longs;
 
@@ -48,14 +44,12 @@ public class TestBitPack {
   FileSystem fs;
   Path testFilePath;
 
-  @Rule
-  public TestName testCaseName = new TestName();
-
-  @Before
-  public void openFileSystem() throws Exception {
+  @BeforeEach
+  public void openFileSystem(TestInfo testInfo) throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
-    testFilePath = new Path(workDir, "TestOrcFile." + testCaseName.getMethodName() + ".orc");
+    testFilePath = new Path(workDir, "TestOrcFile." +
+        testInfo.getTestMethod().get().getName() + ".orc");
     fs.delete(testFilePath, false);
   }
 
diff --git a/java/core/src/test/org/apache/orc/impl/TestColumnStatisticsImpl.java b/java/core/src/test/org/apache/orc/impl/TestColumnStatisticsImpl.java
index 9b8fed1..f2cf283 100644
--- a/java/core/src/test/org/apache/orc/impl/TestColumnStatisticsImpl.java
+++ b/java/core/src/test/org/apache/orc/impl/TestColumnStatisticsImpl.java
@@ -27,14 +27,12 @@ import org.apache.orc.OrcProto;
 import org.apache.orc.Reader;
 import org.apache.orc.TimestampColumnStatistics;
 import org.apache.orc.TypeDescription;
-import org.junit.Test;
 
 import java.io.IOException;
 import java.util.TimeZone;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class TestColumnStatisticsImpl {
 
diff --git a/java/core/src/test/org/apache/orc/impl/TestConvertTreeReaderFactory.java b/java/core/src/test/org/apache/orc/impl/TestConvertTreeReaderFactory.java
index 5c5016b..0cb3292 100644
--- a/java/core/src/test/org/apache/orc/impl/TestConvertTreeReaderFactory.java
+++ b/java/core/src/test/org/apache/orc/impl/TestConvertTreeReaderFactory.java
@@ -46,13 +46,9 @@ import org.apache.orc.RecordReader;
 import org.apache.orc.TestProlepticConversions;
 import org.apache.orc.TypeDescription;
 import org.apache.orc.Writer;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class TestConvertTreeReaderFactory {
 
@@ -64,17 +60,15 @@ public class TestConvertTreeReaderFactory {
   private Path testFilePath;
   private int LARGE_BATCH_SIZE;
 
-  @Rule
-  public TestName testCaseName = new TestName();
-
-  @Before
-  public void setupPath() throws Exception {
+  @BeforeEach
+  public void setupPath(TestInfo testInfo) throws Exception {
     // Default CV length is 1024
     this.LARGE_BATCH_SIZE = 1030;
     this.conf = new Configuration();
     this.fs = FileSystem.getLocal(conf);
-    this.testFilePath = new Path(workDir, TestWriterImpl.class.getSimpleName() + testCaseName.getMethodName().
-        replaceFirst("\\[[0-9]+]", "") + ".orc");
+    this.testFilePath = new Path(workDir, TestWriterImpl.class.getSimpleName() +
+        testInfo.getTestMethod().get().getName().replaceFirst("\\[[0-9]+]", "") +
+        ".orc");
     fs.delete(testFilePath, false);
   }
 
diff --git a/java/core/src/test/org/apache/orc/impl/TestCryptoUtils.java b/java/core/src/test/org/apache/orc/impl/TestCryptoUtils.java
index ed23e01..a726b77 100644
--- a/java/core/src/test/org/apache/orc/impl/TestCryptoUtils.java
+++ b/java/core/src/test/org/apache/orc/impl/TestCryptoUtils.java
@@ -25,15 +25,14 @@ import org.apache.orc.InMemoryKeystore;
 import org.apache.orc.OrcConf;
 import org.apache.orc.OrcProto;
 import org.apache.orc.impl.reader.ReaderEncryptionVariant;
-import org.junit.Test;
 
 import java.io.IOException;
 import java.security.Key;
 import java.util.List;
 import java.util.Random;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class TestCryptoUtils {
 
diff --git a/java/core/src/test/org/apache/orc/impl/TestDataReaderProperties.java b/java/core/src/test/org/apache/orc/impl/TestDataReaderProperties.java
index a0ee057..b9ed229 100644
--- a/java/core/src/test/org/apache/orc/impl/TestDataReaderProperties.java
+++ b/java/core/src/test/org/apache/orc/impl/TestDataReaderProperties.java
@@ -20,14 +20,12 @@ package org.apache.orc.impl;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.orc.CompressionKind;
-import org.junit.Test;
 
 import java.io.IOException;
 import java.util.function.Supplier;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNull;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 import static org.mockito.Mockito.mock;
 
 public class TestDataReaderProperties {
@@ -87,27 +85,33 @@ public class TestDataReaderProperties {
     assertFalse(properties.getZeroCopy());
   }
 
-  @Test(expected = java.lang.NullPointerException.class)
+  @Test
   public void testEmptyBuild() {
-    DataReaderProperties.builder().build();
+    assertThrows(NullPointerException.class, () -> {
+      DataReaderProperties.builder().build();
+    });
   }
 
-  @Test(expected = java.lang.NullPointerException.class)
+  @Test
   public void testMissingPath() {
-    DataReaderProperties.builder()
-      .withFileSystemSupplier(mockedSupplier)
-      .withCompression(InStream.options())
-      .withZeroCopy(mockedZeroCopy)
-      .build();
+    assertThrows(NullPointerException.class, () -> {
+      DataReaderProperties.builder()
+        .withFileSystemSupplier(mockedSupplier)
+        .withCompression(InStream.options())
+        .withZeroCopy(mockedZeroCopy)
+        .build();
+    });
   }
 
-  @Test(expected = java.lang.NullPointerException.class)
+  @Test
   public void testMissingFileSystem() {
-    DataReaderProperties.builder()
-      .withPath(mockedPath)
-      .withCompression(InStream.options())
-      .withZeroCopy(mockedZeroCopy)
-      .build();
+    assertThrows(NullPointerException.class, () -> {
+      DataReaderProperties.builder()
+        .withPath(mockedPath)
+        .withCompression(InStream.options())
+        .withZeroCopy(mockedZeroCopy)
+        .build();
+    });
   }
 
 }
diff --git a/java/core/src/test/org/apache/orc/impl/TestDateUtils.java b/java/core/src/test/org/apache/orc/impl/TestDateUtils.java
index 5c75a1d..fbf65d5 100644
--- a/java/core/src/test/org/apache/orc/impl/TestDateUtils.java
+++ b/java/core/src/test/org/apache/orc/impl/TestDateUtils.java
@@ -18,9 +18,8 @@
 
 package org.apache.orc.impl;
 
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class TestDateUtils {
   /**
@@ -51,10 +50,10 @@ public class TestDateUtils {
 
   void checkConversion(int dayOfEpoch, String hybrid, String proleptic) {
     String result = DateUtils.printDate(dayOfEpoch, false);
-    assertEquals("day " + dayOfEpoch, hybrid, result);
+    assertEquals(hybrid, result, "day " + dayOfEpoch);
     assertEquals(dayOfEpoch, (int) DateUtils.parseDate(result, false));
     result = DateUtils.printDate(dayOfEpoch, true);
-    assertEquals("day " + dayOfEpoch, proleptic, result);
+    assertEquals(proleptic, result, "day " + dayOfEpoch);
     assertEquals(dayOfEpoch, (int) DateUtils.parseDate(result, true));
   }
 }
diff --git a/java/core/src/test/org/apache/orc/impl/TestDynamicArray.java b/java/core/src/test/org/apache/orc/impl/TestDynamicArray.java
index af583f7..5783fdc 100644
--- a/java/core/src/test/org/apache/orc/impl/TestDynamicArray.java
+++ b/java/core/src/test/org/apache/orc/impl/TestDynamicArray.java
@@ -21,9 +21,9 @@ import java.util.Random;
 
 import org.apache.orc.impl.DynamicByteArray;
 import org.apache.orc.impl.DynamicIntArray;
-import org.junit.Test;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class TestDynamicArray {
 
diff --git a/java/core/src/test/org/apache/orc/impl/TestInStream.java b/java/core/src/test/org/apache/orc/impl/TestInStream.java
index e6b05b5..73c73d8 100644
--- a/java/core/src/test/org/apache/orc/impl/TestInStream.java
+++ b/java/core/src/test/org/apache/orc/impl/TestInStream.java
@@ -18,13 +18,8 @@
 
 package org.apache.orc.impl;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertNotSame;
-import static org.junit.Assert.assertSame;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
@@ -41,7 +36,6 @@ import org.apache.orc.EncryptionAlgorithm;
 import org.apache.orc.OrcProto;
 import org.apache.orc.PhysicalWriter;
 import org.apache.orc.impl.writer.StreamOptions;
-import org.junit.Test;
 
 public class TestInStream {
 
@@ -147,11 +141,11 @@ public class TestInStream {
         in.toString());
     for(int i=0; i < 1024; ++i) {
       int x = in.read();
-      assertEquals("value " + i, i & 0xff, x);
+      assertEquals(i & 0xff, x, "value " + i);
     }
     for(int i=1023; i >= 0; --i) {
       in.seek(positions[i]);
-      assertEquals("value " + i, i & 0xff, in.read());
+      assertEquals(i & 0xff, in.read(), "value " + i);
     }
   }
 
@@ -217,11 +211,11 @@ public class TestInStream {
                        " range: 0 offset: 0 position: 0 limit: 1965",
           in.toString());
       for (int i = 0; i < ROW_COUNT; ++i) {
-        assertEquals("row " + i, i * DATA_CONST, inputStream.readLong());
+        assertEquals(i * DATA_CONST, inputStream.readLong(), "row " + i);
       }
       for (int i = ROW_COUNT - 1; i >= 0; --i) {
         in.seek(positions[i]);
-        assertEquals("row " + i, i * DATA_CONST, inputStream.readLong());
+        assertEquals(i * DATA_CONST, inputStream.readLong(), "row " + i);
       }
     }
   }
@@ -265,11 +259,11 @@ public class TestInStream {
                        " range: 0 offset: 0 position: 0 limit: 1965",
           in.toString());
       for (int i = 0; i < ROW_COUNT; ++i) {
-        assertEquals("row " + i, i * DATA_CONST, inputStream.readLong());
+        assertEquals(i * DATA_CONST, inputStream.readLong(), "row " + i);
       }
       for (int i = ROW_COUNT - 1; i >= 0; --i) {
         in.seek(positions[i]);
-        assertEquals("row " + i, i * DATA_CONST, inputStream.readLong());
+        assertEquals(i * DATA_CONST, inputStream.readLong(), "row " + i);
       }
     }
   }
@@ -344,11 +338,11 @@ public class TestInStream {
                        (bytes.length - 15) + " to " + bytes.length,
           in.toString());
       for (int i = 0; i < ROW_COUNT; ++i) {
-        assertEquals("row " + i, i * DATA_CONST, inputStream.readLong());
+        assertEquals(i * DATA_CONST, inputStream.readLong(), "row " + i);
       }
       for (int i = ROW_COUNT - 1; i >= 0; --i) {
         in.seek(positions[i]);
-        assertEquals("row " + i, i * DATA_CONST, inputStream.readLong());
+        assertEquals(i * DATA_CONST, inputStream.readLong(), "row " + i);
       }
     }
   }
@@ -398,11 +392,11 @@ public class TestInStream {
                        "  range 1 = 2100 to 4044;  range 2 = 4044 to 5044",
           in.toString());
       for (int i = 0; i < ROW_COUNT; ++i) {
-        assertEquals("row " + i, i * DATA_CONST, inputStream.readLong());
+        assertEquals(i * DATA_CONST, inputStream.readLong(), "row " + i);
       }
       for (int i = ROW_COUNT - 1; i >= 0; --i) {
         in.seek(positions[i]);
-        assertEquals("row " + i, i * DATA_CONST, inputStream.readLong());
+        assertEquals(i * DATA_CONST, inputStream.readLong(), "row " + i);
       }
     }
   }
@@ -845,7 +839,7 @@ public class TestInStream {
     byte[] inBuffer = new byte[4096];
     assertEquals(4096, inStream.read(inBuffer));
     for(int i=0; i < inBuffer.length; ++i) {
-      assertEquals("position " + i, (byte)i, inBuffer[i]);
+      assertEquals((byte)i, inBuffer[i], "position " + i);
     }
   }
 
@@ -891,10 +885,9 @@ public class TestInStream {
     posn = 0;
     int read = inStream.read(inBuffer);
     while (read != -1) {
-      assertEquals("Read length at " + posn,
-          Math.min(STREAM_LENGTH - posn, CHUNK_LENGTH), read);
+      assertEquals(Math.min(STREAM_LENGTH - posn, CHUNK_LENGTH), read, "Read length at " + posn);
       for(int i=0; i < read; ++i) {
-        assertEquals("posn " + posn + " + " + i, (byte)(posn + i), inBuffer[i]);
+        assertEquals((byte)(posn + i), inBuffer[i], "posn " + posn + " + " + i);
       }
       posn += read;
       read = inStream.read(inBuffer);
@@ -935,7 +928,7 @@ public class TestInStream {
     byte[] inBuffer = new byte[4096];
     assertEquals(4096, inStream.read(inBuffer));
     for(int i=0; i < inBuffer.length; ++i) {
-      assertEquals("position " + i, (byte)i, inBuffer[i]);
+      assertEquals((byte)i, inBuffer[i], "position " + i);
     }
   }
 
@@ -956,7 +949,7 @@ public class TestInStream {
     byte[] inBuffer = new byte[4096];
     assertEquals(4096, inStream.read(inBuffer));
     for(int i=0; i < inBuffer.length; ++i) {
-      assertEquals("position " + i, (byte)i, inBuffer[i]);
+      assertEquals((byte)i, inBuffer[i], "position " + i);
     }
   }
 }
diff --git a/java/core/src/test/org/apache/orc/impl/TestIntegerCompressionReader.java b/java/core/src/test/org/apache/orc/impl/TestIntegerCompressionReader.java
index 6bf4e1a..c392cc8 100644
--- a/java/core/src/test/org/apache/orc/impl/TestIntegerCompressionReader.java
+++ b/java/core/src/test/org/apache/orc/impl/TestIntegerCompressionReader.java
@@ -22,9 +22,9 @@ import java.util.Random;
 
 import org.apache.orc.CompressionCodec;
 import org.apache.orc.impl.writer.StreamOptions;
-import org.junit.Test;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class TestIntegerCompressionReader {
 
diff --git a/java/core/src/test/org/apache/orc/impl/TestMemoryManager.java b/java/core/src/test/org/apache/orc/impl/TestMemoryManager.java
index b9ce719..c85ae91 100644
--- a/java/core/src/test/org/apache/orc/impl/TestMemoryManager.java
+++ b/java/core/src/test/org/apache/orc/impl/TestMemoryManager.java
@@ -20,16 +20,13 @@ package org.apache.orc.impl;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.orc.MemoryManager;
-import org.hamcrest.BaseMatcher;
-import org.hamcrest.Description;
-import org.junit.Test;
 import static org.mockito.ArgumentMatchers.eq;
 import org.mockito.Mockito;
 
 import java.lang.management.ManagementFactory;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  * Test the ORC memory manager.
@@ -80,33 +77,8 @@ public class TestMemoryManager {
         ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax();
     System.err.print("Memory = " + mem);
     long pool = mgr.getTotalMemoryPool();
-    assertTrue("Pool too small: " + pool, mem * 0.899 < pool);
-    assertTrue("Pool too big: " + pool, pool < mem * 0.901);
-  }
-
-  private static class DoubleMatcher extends BaseMatcher<Double> {
-    final double expected;
-    final double error;
-    DoubleMatcher(double expected, double error) {
-      this.expected = expected;
-      this.error = error;
-    }
-
-    @Override
-    public boolean matches(Object val) {
-      double dbl = (Double) val;
-      return Math.abs(dbl - expected) <= error;
-    }
-
-    @Override
-    public void describeTo(Description description) {
-      description.appendText("not sufficiently close to ");
-      description.appendText(Double.toString(expected));
-    }
-  }
-
-  private static DoubleMatcher closeTo(double value, double error) {
-    return new DoubleMatcher(value, error);
+    assertTrue(mem * 0.899 < pool, "Pool too small: " + pool);
+    assertTrue(pool < mem * 0.901, "Pool too big: " + pool);
   }
 
   @Test
diff --git a/java/core/src/test/org/apache/orc/impl/TestOrcFilterContextImpl.java b/java/core/src/test/org/apache/orc/impl/TestOrcFilterContextImpl.java
index 5218157..b4d367c 100644
--- a/java/core/src/test/org/apache/orc/impl/TestOrcFilterContextImpl.java
+++ b/java/core/src/test/org/apache/orc/impl/TestOrcFilterContextImpl.java
@@ -24,22 +24,12 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.StructColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.orc.TypeDescription;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
 
 import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 
-import static org.hamcrest.CoreMatchers.containsString;
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertThrows;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class TestOrcFilterContextImpl {
 
@@ -50,9 +40,6 @@ public class TestOrcFilterContextImpl {
       .addField("f2b", TypeDescription.createString()))
     .addField("f3", TypeDescription.createString());
 
-  @Rule
-  public final ExpectedException thrown = ExpectedException.none();
-
   @Test
   public void testSuccessfulRetrieval() {
     VectorizedRowBatch b = createBatch();
@@ -86,9 +73,9 @@ public class TestOrcFilterContextImpl {
     fc.setBatch(b);
 
     // Missing field at top level
-    IllegalArgumentException exception = assertThrows(IllegalArgumentException.class,
-                                                      () -> fc.findColumnVector("f4"));
-    assertThat(exception.getMessage(), containsString("Field f4 not found in"));
+    IllegalArgumentException e = assertThrows(IllegalArgumentException.class,
+                                              () -> fc.findColumnVector("f4"));
+    assertTrue(e.getMessage().contains("Field f4 not found in"));
   }
 
   @Test
@@ -98,10 +85,10 @@ public class TestOrcFilterContextImpl {
     fc.setBatch(b);
 
     // Missing field at top level
-    IllegalArgumentException exception = assertThrows(IllegalArgumentException.class,
-                                                      () -> fc.findColumnVector("f2.c"));
-    assertThat(exception.getMessage(),
-               containsString("Field c not found in struct<f2a:bigint,f2b:string>"));
+    IllegalArgumentException e = assertThrows(IllegalArgumentException.class,
+                                              () -> fc.findColumnVector("f2.c"));
+    assertTrue(e.getMessage().contains(
+        "Field c not found in struct<f2a:bigint,f2b:string>"));
   }
 
   @Test
diff --git a/java/core/src/test/org/apache/orc/impl/TestOrcLargeStripe.java b/java/core/src/test/org/apache/orc/impl/TestOrcLargeStripe.java
index 4da92a6..1064f4b 100644
--- a/java/core/src/test/org/apache/orc/impl/TestOrcLargeStripe.java
+++ b/java/core/src/test/org/apache/orc/impl/TestOrcLargeStripe.java
@@ -15,9 +15,9 @@
  */
 package org.apache.orc.impl;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assumptions.*;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.anyLong;
 import static org.mockito.ArgumentMatchers.eq;
@@ -47,15 +47,11 @@ import org.apache.orc.Reader;
 import org.apache.orc.RecordReader;
 import org.apache.orc.TypeDescription;
 import org.apache.orc.Writer;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;
 
-@RunWith(MockitoJUnitRunner.class)
+@ExtendWith(MockitoExtension.class)
 public class TestOrcLargeStripe {
 
   private Path workDir = new Path(System.getProperty("test.tmp.dir", "target" + File.separator + "test"
@@ -65,14 +61,12 @@ public class TestOrcLargeStripe {
   FileSystem fs;
   private Path testFilePath;
 
-  @Rule
-  public TestName testCaseName = new TestName();
-
-  @Before
-  public void openFileSystem() throws Exception {
+  @BeforeEach
+  public void openFileSystem(TestInfo testInfo) throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
-    testFilePath = new Path(workDir, "TestOrcFile." + testCaseName.getMethodName() + ".orc");
+    testFilePath = new Path(workDir, "TestOrcFile." +
+        testInfo.getTestMethod().get().getName() + ".orc");
     fs.delete(testFilePath, false);
   }
 
@@ -141,8 +135,6 @@ public class TestOrcLargeStripe {
     Configuration conf = new Configuration();
     FileSystem fs = FileSystem.getLocal(conf);
     TypeDescription schema = TypeDescription.createTimestamp();
-    testFilePath = new Path(workDir, "TestOrcLargeStripe." +
-      testCaseName.getMethodName() + ".orc");
     fs.delete(testFilePath, false);
     Writer writer = OrcFile.createWriter(testFilePath,
       OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000).bufferSize(10000)
diff --git a/java/core/src/test/org/apache/orc/impl/TestOrcWideTable.java b/java/core/src/test/org/apache/orc/impl/TestOrcWideTable.java
index 289a86e..7eb15c7 100644
--- a/java/core/src/test/org/apache/orc/impl/TestOrcWideTable.java
+++ b/java/core/src/test/org/apache/orc/impl/TestOrcWideTable.java
@@ -18,12 +18,11 @@
 
 package org.apache.orc.impl;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 import java.io.IOException;
 
-import org.junit.Test;
-
 public class TestOrcWideTable {
 
   @Test
diff --git a/java/core/src/test/org/apache/orc/impl/TestOutStream.java b/java/core/src/test/org/apache/orc/impl/TestOutStream.java
index 41eef50..9dde2da 100644
--- a/java/core/src/test/org/apache/orc/impl/TestOutStream.java
+++ b/java/core/src/test/org/apache/orc/impl/TestOutStream.java
@@ -18,8 +18,9 @@
 
 package org.apache.orc.impl;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assumptions.*;
 
 import java.io.BufferedReader;
 import java.io.InputStreamReader;
@@ -35,8 +36,6 @@ import org.apache.orc.InMemoryKeystore;
 import org.apache.orc.OrcProto;
 import org.apache.orc.PhysicalWriter;
 import org.apache.orc.impl.writer.StreamOptions;
-import org.junit.Assume;
-import org.junit.Test;
 import org.mockito.Mockito;
 
 public class TestOutStream {
@@ -151,7 +150,7 @@ public class TestOutStream {
       };
       assertEquals(generated.length, output.length);
       for (int i = 0; i < generated.length; ++i) {
-        assertEquals("i = " + i, (byte) (generated[i] ^ data[i]), output[i]);
+        assertEquals((byte) (generated[i] ^ data[i]), output[i], "i = " + i);
       }
 
       receiver.buffer.clear();
@@ -164,7 +163,7 @@ public class TestOutStream {
       generated = new int[]{0x16, 0x03, 0xE6, 0xC3};
       assertEquals(generated.length, output.length);
       for (int i = 0; i < generated.length; ++i) {
-        assertEquals("i = " + i, (byte) (generated[i] ^ data[i]), output[i]);
+        assertEquals((byte) (generated[i] ^ data[i]), output[i], "i = " + i);
       }
     }
   }
@@ -172,7 +171,7 @@ public class TestOutStream {
   @Test
   public void testCompression256Encryption() throws Exception {
     // disable test if AES_256 is not available
-    Assume.assumeTrue(InMemoryKeystore.SUPPORTS_AES_256);
+    assumeTrue(InMemoryKeystore.SUPPORTS_AES_256);
     TestInStream.OutputCollector receiver = new TestInStream.OutputCollector();
     EncryptionAlgorithm aes256 = EncryptionAlgorithm.AES_CTR_256;
     byte[] keyBytes = new byte[aes256.keyLength()];
@@ -212,7 +211,7 @@ public class TestOutStream {
              StandardCharsets.UTF_8))) {
       // check the contents of the decompressed stream
       for (int i = 0; i < 10000; ++i) {
-        assertEquals("i = " + i, "The Cheesy Poofs " + i, reader.readLine());
+        assertEquals("The Cheesy Poofs " + i, reader.readLine(), "i = " + i);
       }
       assertEquals(null, reader.readLine());
     }
diff --git a/java/core/src/test/org/apache/orc/impl/TestPhysicalFsWriter.java b/java/core/src/test/org/apache/orc/impl/TestPhysicalFsWriter.java
index 6028307..9ad856c 100644
--- a/java/core/src/test/org/apache/orc/impl/TestPhysicalFsWriter.java
+++ b/java/core/src/test/org/apache/orc/impl/TestPhysicalFsWriter.java
@@ -31,7 +31,6 @@ import org.apache.orc.OrcFile;
 import org.apache.orc.OrcProto;
 import org.apache.orc.PhysicalWriter;
 import org.apache.orc.TypeDescription;
-import org.junit.Test;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.net.URI;
@@ -43,7 +42,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Random;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class TestPhysicalFsWriter {
 
diff --git a/java/core/src/test/org/apache/orc/impl/TestPredicatePushDownBounds.java b/java/core/src/test/org/apache/orc/impl/TestPredicatePushDownBounds.java
index 55f54ac..430f983 100644
--- a/java/core/src/test/org/apache/orc/impl/TestPredicatePushDownBounds.java
+++ b/java/core/src/test/org/apache/orc/impl/TestPredicatePushDownBounds.java
@@ -24,13 +24,13 @@ import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
 import org.apache.orc.IntegerColumnStatistics;
 import org.apache.orc.TypeDescription;
 import org.apache.orc.util.BloomFilter;
-import org.junit.Test;
 
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 import static org.apache.orc.impl.TestRecordReaderImpl.createPredicateLeaf;
 
 public class TestPredicatePushDownBounds {
diff --git a/java/core/src/test/org/apache/orc/impl/TestReaderImpl.java b/java/core/src/test/org/apache/orc/impl/TestReaderImpl.java
index 3082c15..f6bb1be 100644
--- a/java/core/src/test/org/apache/orc/impl/TestReaderImpl.java
+++ b/java/core/src/test/org/apache/orc/impl/TestReaderImpl.java
@@ -15,8 +15,8 @@
  */
 package org.apache.orc.impl;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 import java.io.ByteArrayInputStream;
 import java.io.EOFException;
@@ -49,24 +49,17 @@ import org.apache.orc.RecordReader;
 import org.apache.orc.StripeStatistics;
 import org.apache.orc.TestVectorOrcFile;
 import org.apache.orc.TypeDescription;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
 
 public class TestReaderImpl {
   private Path workDir = new Path(System.getProperty("example.dir",
       "../../examples/"));
 
-  @Rule
-  public ExpectedException thrown = ExpectedException.none();
-
   private final Path path = new Path("test-file.orc");
   private FSDataInputStream in;
   private int psLen;
   private ByteBuffer buffer;
 
-  @Before
+  @BeforeEach
   public void setup() {
     in = null;
   }
@@ -74,15 +67,17 @@ public class TestReaderImpl {
   @Test
   public void testEnsureOrcFooterSmallTextFile() throws IOException {
     prepareTestCase("1".getBytes(StandardCharsets.UTF_8));
-    thrown.expect(FileFormatException.class);
-    ReaderImpl.ensureOrcFooter(in, path, psLen, buffer);
+    assertThrows(FileFormatException.class, () -> {
+      ReaderImpl.ensureOrcFooter(in, path, psLen, buffer);
+    });
   }
 
   @Test
   public void testEnsureOrcFooterLargeTextFile() throws IOException {
     prepareTestCase("This is Some Text File".getBytes(StandardCharsets.UTF_8));
-    thrown.expect(FileFormatException.class);
-    ReaderImpl.ensureOrcFooter(in, path, psLen, buffer);
+    assertThrows(FileFormatException.class, () -> {
+      ReaderImpl.ensureOrcFooter(in, path, psLen, buffer);
+    });
   }
 
   @Test
diff --git a/java/core/src/test/org/apache/orc/impl/TestRecordReaderImpl.java b/java/core/src/test/org/apache/orc/impl/TestRecordReaderImpl.java
index 98aa336..eaa9ecf 100644
--- a/java/core/src/test/org/apache/orc/impl/TestRecordReaderImpl.java
+++ b/java/core/src/test/org/apache/orc/impl/TestRecordReaderImpl.java
@@ -18,10 +18,8 @@
 
 package org.apache.orc.impl;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.Mockito.atLeastOnce;
@@ -91,7 +89,6 @@ import org.apache.orc.OrcProto;
 
 import org.apache.orc.util.BloomFilterIO;
 import org.apache.orc.util.BloomFilterUtf8;
-import org.junit.Test;
 
 public class TestRecordReaderImpl {
 
diff --git a/java/core/src/test/org/apache/orc/impl/TestRunLengthByteReader.java b/java/core/src/test/org/apache/orc/impl/TestRunLengthByteReader.java
index cc81dbc..758fb4b 100644
--- a/java/core/src/test/org/apache/orc/impl/TestRunLengthByteReader.java
+++ b/java/core/src/test/org/apache/orc/impl/TestRunLengthByteReader.java
@@ -21,9 +21,9 @@ import java.nio.ByteBuffer;
 
 import org.apache.orc.CompressionCodec;
 import org.apache.orc.impl.writer.StreamOptions;
-import org.junit.Test;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestRunLengthByteReader {
 
diff --git a/java/core/src/test/org/apache/orc/impl/TestRunLengthIntegerReader.java b/java/core/src/test/org/apache/orc/impl/TestRunLengthIntegerReader.java
index e57a1ba..6eead8b 100644
--- a/java/core/src/test/org/apache/orc/impl/TestRunLengthIntegerReader.java
+++ b/java/core/src/test/org/apache/orc/impl/TestRunLengthIntegerReader.java
@@ -22,9 +22,9 @@ import java.util.Random;
 
 import org.apache.orc.CompressionCodec;
 import org.apache.orc.impl.writer.StreamOptions;
-import org.junit.Test;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestRunLengthIntegerReader {
 
diff --git a/java/core/src/test/org/apache/orc/impl/TestSchemaEvolution.java b/java/core/src/test/org/apache/orc/impl/TestSchemaEvolution.java
index 9ada68e..102b396 100644
--- a/java/core/src/test/org/apache/orc/impl/TestSchemaEvolution.java
+++ b/java/core/src/test/org/apache/orc/impl/TestSchemaEvolution.java
@@ -17,11 +17,8 @@
  */
 package org.apache.orc.impl;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertSame;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 import java.io.File;
 import java.io.IOException;
@@ -56,16 +53,9 @@ import org.apache.orc.impl.reader.StripePlanner;
 import org.apache.orc.impl.reader.tree.BatchReader;
 import org.apache.orc.impl.reader.tree.StructBatchReader;
 import org.apache.orc.impl.reader.tree.TypeReader;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
 
 public class TestSchemaEvolution {
 
-  @Rule
-  public TestName testCaseName = new TestName();
-
   Configuration conf;
   Reader.Options options;
   Path testFilePath;
@@ -73,13 +63,13 @@ public class TestSchemaEvolution {
   Path workDir = new Path(System.getProperty("test.tmp.dir",
       "target" + File.separator + "test" + File.separator + "tmp"));
 
-  @Before
-  public void setup() throws Exception {
+  @BeforeEach
+  public void setup(TestInfo testInfo) throws Exception {
     conf = new Configuration();
     options = new Reader.Options(conf);
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestSchemaEvolution." +
-        testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     fs.delete(testFilePath, false);
   }
 
@@ -337,9 +327,9 @@ public class TestSchemaEvolution {
   }
 
   @Test
-  public void testFloatToDoubleEvolution() throws Exception {
+  public void testFloatToDoubleEvolution(TestInfo testInfo) throws Exception {
     testFilePath = new Path(workDir, "TestSchemaEvolution." +
-        testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     TypeDescription schema = TypeDescription.createFloat();
     Writer writer = OrcFile.createWriter(testFilePath,
         OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000)
@@ -364,9 +354,9 @@ public class TestSchemaEvolution {
   }
 
   @Test
-  public void testFloatToDecimalEvolution() throws Exception {
+  public void testFloatToDecimalEvolution(TestInfo testInfo) throws Exception {
     testFilePath = new Path(workDir, "TestSchemaEvolution." +
-      testCaseName.getMethodName() + ".orc");
+      testInfo.getTestMethod().get().getName() + ".orc");
     TypeDescription schema = TypeDescription.createFloat();
     Writer writer = OrcFile.createWriter(testFilePath,
       OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000)
@@ -391,9 +381,9 @@ public class TestSchemaEvolution {
   }
 
   @Test
-  public void testFloatToDecimal64Evolution() throws Exception {
+  public void testFloatToDecimal64Evolution(TestInfo testInfo) throws Exception {
     testFilePath = new Path(workDir, "TestSchemaEvolution." +
-      testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     TypeDescription schema = TypeDescription.createFloat();
     Writer writer = OrcFile.createWriter(testFilePath,
       OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000)
@@ -418,9 +408,9 @@ public class TestSchemaEvolution {
   }
 
   @Test
-  public void testDoubleToDecimalEvolution() throws Exception {
+  public void testDoubleToDecimalEvolution(TestInfo testInfo) throws Exception {
     testFilePath = new Path(workDir, "TestSchemaEvolution." +
-      testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     TypeDescription schema = TypeDescription.createDouble();
     Writer writer = OrcFile.createWriter(testFilePath,
       OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000)
@@ -445,9 +435,9 @@ public class TestSchemaEvolution {
   }
 
   @Test
-  public void testDoubleToDecimal64Evolution() throws Exception {
+  public void testDoubleToDecimal64Evolution(TestInfo testInfo) throws Exception {
     testFilePath = new Path(workDir, "TestSchemaEvolution." +
-      testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     TypeDescription schema = TypeDescription.createDouble();
     Writer writer = OrcFile.createWriter(testFilePath,
       OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000)
@@ -472,9 +462,9 @@ public class TestSchemaEvolution {
   }
 
   @Test
-  public void testLongToDecimalEvolution() throws Exception {
+  public void testLongToDecimalEvolution(TestInfo testInfo) throws Exception {
     testFilePath = new Path(workDir, "TestSchemaEvolution." +
-      testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     TypeDescription schema = TypeDescription.createLong();
     Writer writer = OrcFile.createWriter(testFilePath,
       OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000)
@@ -499,9 +489,9 @@ public class TestSchemaEvolution {
   }
 
   @Test
-  public void testLongToDecimal64Evolution() throws Exception {
+  public void testLongToDecimal64Evolution(TestInfo testInfo) throws Exception {
     testFilePath = new Path(workDir, "TestSchemaEvolution." +
-      testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     TypeDescription schema = TypeDescription.createLong();
     Writer writer = OrcFile.createWriter(testFilePath,
       OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000)
@@ -526,9 +516,9 @@ public class TestSchemaEvolution {
   }
 
   @Test
-  public void testDecimalToDecimalEvolution() throws Exception {
+  public void testDecimalToDecimalEvolution(TestInfo testInfo) throws Exception {
     testFilePath = new Path(workDir, "TestSchemaEvolution." +
-      testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     TypeDescription schema = TypeDescription.createDecimal().withPrecision(38).withScale(0);
     Writer writer = OrcFile.createWriter(testFilePath,
       OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000)
@@ -553,9 +543,9 @@ public class TestSchemaEvolution {
   }
 
   @Test
-  public void testDecimalToDecimal64Evolution() throws Exception {
+  public void testDecimalToDecimal64Evolution(TestInfo testInfo) throws Exception {
     testFilePath = new Path(workDir, "TestSchemaEvolution." +
-      testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     TypeDescription schema = TypeDescription.createDecimal().withPrecision(38).withScale(2);
     Writer writer = OrcFile.createWriter(testFilePath,
       OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000)
@@ -580,9 +570,9 @@ public class TestSchemaEvolution {
   }
 
   @Test
-  public void testBooleanToStringEvolution() throws Exception {
+  public void testBooleanToStringEvolution(TestInfo testInfo) throws Exception {
     testFilePath = new Path(workDir, "TestSchemaEvolution." +
-      testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     TypeDescription schema = TypeDescription.createBoolean();
     Writer writer = OrcFile.createWriter(testFilePath,
       OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000)
@@ -643,9 +633,9 @@ public class TestSchemaEvolution {
   }
 
   @Test
-  public void testStringToDecimalEvolution() throws Exception {
+  public void testStringToDecimalEvolution(TestInfo testInfo) throws Exception {
     testFilePath = new Path(workDir, "TestSchemaEvolution." +
-      testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     TypeDescription schema = TypeDescription.createString();
     Writer writer = OrcFile.createWriter(testFilePath,
       OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000)
@@ -671,9 +661,9 @@ public class TestSchemaEvolution {
   }
 
   @Test
-  public void testStringToDecimal64Evolution() throws Exception {
+  public void testStringToDecimal64Evolution(TestInfo testInfo) throws Exception {
     testFilePath = new Path(workDir, "TestSchemaEvolution." +
-      testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     TypeDescription schema = TypeDescription.createString();
     Writer writer = OrcFile.createWriter(testFilePath,
       OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000)
@@ -699,9 +689,9 @@ public class TestSchemaEvolution {
   }
 
   @Test
-  public void testTimestampToDecimalEvolution() throws Exception {
+  public void testTimestampToDecimalEvolution(TestInfo testInfo) throws Exception {
     testFilePath = new Path(workDir, "TestSchemaEvolution." +
-      testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     TypeDescription schema = TypeDescription.createTimestamp();
     Writer writer = OrcFile.createWriter(testFilePath,
       OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000)
@@ -735,9 +725,9 @@ public class TestSchemaEvolution {
   }
 
   @Test
-  public void testTimestampToDecimal64Evolution() throws Exception {
+  public void testTimestampToDecimal64Evolution(TestInfo testInfo) throws Exception {
     testFilePath = new Path(workDir, "TestSchemaEvolution." +
-      testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     TypeDescription schema = TypeDescription.createTimestamp();
     Writer writer = OrcFile.createWriter(testFilePath,
       OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000)
@@ -762,9 +752,9 @@ public class TestSchemaEvolution {
   }
 
   @Test
-  public void testTimestampToStringEvolution() throws Exception {
+  public void testTimestampToStringEvolution(TestInfo testInfo) throws Exception {
     testFilePath = new Path(workDir, "TestSchemaEvolution." +
-                                         testCaseName.getMethodName() + ".orc");
+        testInfo.getTestMethod().get().getName() + ".orc");
     TypeDescription schema = TypeDescription.fromString("timestamp");
     Writer writer = OrcFile.createWriter(testFilePath,
         OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000)
@@ -1503,13 +1493,15 @@ public class TestSchemaEvolution {
     assertSame(original, mapped);
   }
 
-  @Test(expected = SchemaEvolution.IllegalEvolutionException.class)
+  @Test
   public void testIncompatibleTypes() {
-    TypeDescription fileType = TypeDescription.fromString("struct<a:int>");
-    TypeDescription readerType = TypeDescription.fromString("struct<a:date>");
-    boolean[] included = includeAll(readerType);
-    options.tolerateMissingSchema(false);
-    new SchemaEvolution(fileType, readerType, options.include(included));
+    assertThrows(SchemaEvolution.IllegalEvolutionException.class, () -> {
+      TypeDescription fileType = TypeDescription.fromString("struct<a:int>");
+      TypeDescription readerType = TypeDescription.fromString("struct<a:date>");
+      boolean[] included = includeAll(readerType);
+      options.tolerateMissingSchema(false);
+      new SchemaEvolution(fileType, readerType, options.include(included));
+    });
   }
 
   @Test
@@ -1529,7 +1521,7 @@ public class TestSchemaEvolution {
         evo.getReaderBaseSchema().toString());
     // the first stuff should be an identity
     for(int c=0; c < 8; ++c) {
-      assertEquals("column " + c, c, evo.getFileType(c).getId());
+      assertEquals(c, evo.getFileType(c).getId(), "column " + c);
     }
     // y and z should swap places
     assertEquals(9, evo.getFileType(8).getId());
@@ -1553,7 +1545,7 @@ public class TestSchemaEvolution {
         evo.getReaderBaseSchema().toString());
     // the first stuff should be an identity
     for(int c=0; c < 9; ++c) {
-      assertEquals("column " + c, c, evo.getFileType(c).getId());
+      assertEquals(c, evo.getFileType(c).getId(), "column " + c);
     }
     // the file doesn't have z
     assertEquals(null, evo.getFileType(9));
@@ -1579,16 +1571,16 @@ public class TestSchemaEvolution {
     boolean[] fileInclude = evo.getFileIncluded();
 
     //get top level struct col
-    assertEquals("column " + 0, 0, evo.getFileType(0).getId());
-    assertTrue("column " + 0, fileInclude[0]);
+    assertEquals(0, evo.getFileType(0).getId(), "column " + 0);
+    assertTrue(fileInclude[0], "column " + 0);
     for(int c=1; c < 6; ++c) {
-      assertNull("column " + c, evo.getFileType(c));
+      assertNull(evo.getFileType(c), "column " + c);
       //skip all acid metadata columns
-      assertFalse("column " + c, fileInclude[c]);
+      assertFalse(fileInclude[c], "column " + c);
     }
     for(int c=6; c < 9; ++c) {
-      assertEquals("column " + c, c, evo.getFileType(c).getId());
-      assertTrue("column " + c, fileInclude[c]);
+      assertEquals(c, evo.getFileType(c).getId(), "column " + c);
+      assertTrue(fileInclude[c], "column " + c);
     }
     // don't read the last column
     assertFalse(fileInclude[9]);
@@ -1612,8 +1604,8 @@ public class TestSchemaEvolution {
     // the first stuff should be an identity
     boolean[] fileInclude = evo.getFileIncluded();
     for(int c=0; c < 9; ++c) {
-      assertEquals("column " + c, c, evo.getFileType(c).getId());
-      assertTrue("column " + c, fileInclude[c]);
+      assertEquals(c, evo.getFileType(c).getId(), "column " + c);
+      assertTrue(fileInclude[c], "column " + c);
     }
     // don't read the last column
     assertFalse(fileInclude[9]);
@@ -1633,7 +1625,7 @@ public class TestSchemaEvolution {
     // the first stuff should be an identity
     boolean[] fileInclude = evo.getFileIncluded();
     for(int c=0; c < 9; ++c) {
-      assertEquals("column " + c, c, evo.getFileType(c).getId());
+      assertEquals(c, evo.getFileType(c).getId(), "column " + c);
     }
     assertEquals(10, evo.getFileType(9).getId());
     assertEquals(11, evo.getFileType(10).getId());
@@ -1641,7 +1633,7 @@ public class TestSchemaEvolution {
     assertEquals(12, evo.getFileType(12).getId());
     assertEquals(13, fileInclude.length);
     for(int c=0; c < fileInclude.length; ++c) {
-      assertTrue("column " + c, fileInclude[c]);
+      assertTrue(fileInclude[c], "column " + c);
     }
   }
 
@@ -1665,7 +1657,7 @@ public class TestSchemaEvolution {
     assertEquals(5, evo.getFileType(6).getId());
     assertEquals(6, fileInclude.length);
     for(int c=0; c < fileInclude.length; ++c) {
-      assertTrue("column " + c, fileInclude[c]);
+      assertTrue(fileInclude[c], "column " + c);
     }
   }
 
@@ -1740,10 +1732,9 @@ public class TestSchemaEvolution {
     assertEquals(true, batch.cols[0].isRepeating);
     assertEquals(true, batch.cols[0].isNull[0]);
     for(int r=0; r < 10; ++r) {
-      assertEquals("col1." + r, EXPECTED.substring(r, r+1),
-          ((BytesColumnVector) batch.cols[1]).toString(r));
-      assertEquals("col2." + r, r,
-          ((LongColumnVector) batch.cols[2]).vector[r]);
+      assertEquals(EXPECTED.substring(r, r+1),
+        ((BytesColumnVector) batch.cols[1]).toString(r), "col1." + r);
+      assertEquals(r, ((LongColumnVector) batch.cols[2]).vector[r], "col2." + r);
     }
   }
 
@@ -1975,15 +1966,17 @@ public class TestSchemaEvolution {
           int current = 0;
           for (int r = 0; r < VALUES; ++r) {
             if (current == batch.size) {
-              assertEquals("row " + r, true, rows.nextBatch(batch));
+              assertEquals(true, rows.nextBatch(batch), "row " + r);
               current = 0;
             }
-            assertEquals("row " + r, (timeStrings[r] + " " +
-                                          READER_ZONE.getId()).replace(".7 ", " "),
-                longTimestampToString(t1.vector[current], READER_ZONE));
-            assertEquals("row " + r, (timeStrings[r] + " " +
-                                          WRITER_ZONE.getId()).replace(".7 ", " "),
-                longTimestampToString(t2.vector[current], WRITER_ZONE));
+            assertEquals(
+                (timeStrings[r] + " " + READER_ZONE.getId()).replace(".7 ", " "),
+                longTimestampToString(t1.vector[current], READER_ZONE),
+                "row " + r);
+            assertEquals(
+                (timeStrings[r] + " " + WRITER_ZONE.getId()).replace(".7 ", " "),
+                longTimestampToString(t2.vector[current], WRITER_ZONE),
+                "row " + r);
             current += 1;
           }
           assertEquals(false, rows.nextBatch(batch));
@@ -1998,13 +1991,13 @@ public class TestSchemaEvolution {
           int current = 0;
           for (int r = 0; r < VALUES; ++r) {
             if (current == batch.size) {
-              assertEquals("row " + r, true, rows.nextBatch(batch));
+              assertEquals(true, rows.nextBatch(batch), "row " + r);
               current = 0;
             }
-            assertEquals("row " + r, timeStrings[r] + " " + READER_ZONE.getId(),
-                decimalTimestampToString(t1.vector[current], READER_ZONE));
-            assertEquals("row " + r, timeStrings[r] + " " + WRITER_ZONE.getId(),
-                decimalTimestampToString(t2.vector[current], WRITER_ZONE));
+            assertEquals( timeStrings[r] + " " + READER_ZONE.getId(),
+                decimalTimestampToString(t1.vector[current], READER_ZONE), "row " + r);
+            assertEquals(timeStrings[r] + " " + WRITER_ZONE.getId(),
+                decimalTimestampToString(t2.vector[current], WRITER_ZONE), "row " + r);
             current += 1;
           }
           assertEquals(false, rows.nextBatch(batch));
@@ -2019,13 +2012,13 @@ public class TestSchemaEvolution {
           int current = 0;
           for (int r = 0; r < VALUES; ++r) {
             if (current == batch.size) {
-              assertEquals("row " + r, true, rows.nextBatch(batch));
+              assertEquals(true, rows.nextBatch(batch), "row " + r);
               current = 0;
             }
-            assertEquals("row " + r, timeStrings[r] + " " + READER_ZONE.getId(),
-                doubleTimestampToString(t1.vector[current], READER_ZONE));
-            assertEquals("row " + r, timeStrings[r] + " " + WRITER_ZONE.getId(),
-                doubleTimestampToString(t2.vector[current], WRITER_ZONE));
+            assertEquals( timeStrings[r] + " " + READER_ZONE.getId(),
+                doubleTimestampToString(t1.vector[current], READER_ZONE), "row " + r);
+            assertEquals( timeStrings[r] + " " + WRITER_ZONE.getId(),
+                doubleTimestampToString(t2.vector[current], WRITER_ZONE), "row " + r);
             current += 1;
           }
           assertEquals(false, rows.nextBatch(batch));
@@ -2040,17 +2033,17 @@ public class TestSchemaEvolution {
           int current = 0;
           for (int r = 0; r < VALUES; ++r) {
             if (current == batch.size) {
-              assertEquals("row " + r, true, rows.nextBatch(batch));
+              assertEquals(true, rows.nextBatch(batch), "row " + r);
               current = 0;
             }
             String date = timeStrings[r].substring(0, 10);
-            assertEquals("row " + r, date,
+            assertEquals(date,
                 ConvertTreeReaderFactory.DATE_FORMAT.format(
-                    LocalDate.ofEpochDay(t1.vector[current])));
+                    LocalDate.ofEpochDay(t1.vector[current])), "row " + r);
             // NYC -> Sydney moves forward a day for instant
-            assertEquals("row " + r, date.replace("-27", "-28"),
+            assertEquals(date.replace("-27", "-28"),
                 ConvertTreeReaderFactory.DATE_FORMAT.format(
-                    LocalDate.ofEpochDay(t2.vector[current])));
+                    LocalDate.ofEpochDay(t2.vector[current])), "row " + r);
             current += 1;
           }
           assertEquals(false, rows.nextBatch(batch));
@@ -2065,15 +2058,15 @@ public class TestSchemaEvolution {
           int current = 0;
           for (int r = 0; r < VALUES; ++r) {
             if (current == batch.size) {
-              assertEquals("row " + r, true, rows.nextBatch(batch));
+              assertEquals(true, rows.nextBatch(batch), "row " + r);
               current = 0;
             }
-            assertEquals("row " + r, timeStrings[r], bytesT1.toString(current));
+            assertEquals(timeStrings[r], bytesT1.toString(current), "row " + r);
             Instant t = Instant.from(WRITER_FORMAT.parse(timeStrings[r]));
-            assertEquals("row " + r,
+            assertEquals(
                 timestampToString(Instant.from(WRITER_FORMAT.parse(timeStrings[r])),
                     READER_ZONE),
-                bytesT2.toString(current));
+                bytesT2.toString(current), "row " + r);
             current += 1;
           }
           assertEquals(false, rows.nextBatch(batch));
@@ -2088,14 +2081,16 @@ public class TestSchemaEvolution {
           int current = 0;
           for (int r = 0; r < VALUES; ++r) {
             if (current == batch.size) {
-              assertEquals("row " + r, true, rows.nextBatch(batch));
+              assertEquals(true, rows.nextBatch(batch), "row " + r);
               current = 0;
             }
-            assertEquals("row " + r, timeStrings[r] + " " + READER_ZONE.getId(),
-                timestampToString(timeT1.time[current], timeT1.nanos[current], READER_ZONE));
-            assertEquals("row " + r,
+            assertEquals(timeStrings[r] + " " + READER_ZONE.getId(),
+                timestampToString(timeT1.time[current], timeT1.nanos[current], READER_ZONE),
+                "row " + r);
+            assertEquals(
                 timestampToString(Instant.from(WRITER_FORMAT.parse(timeStrings[r])), READER_ZONE),
-                timestampToString(timeT2.time[current], timeT2.nanos[current], READER_ZONE));
+                timestampToString(timeT2.time[current], timeT2.nanos[current], READER_ZONE),
+                "row " + r);
             current += 1;
           }
           assertEquals(false, rows.nextBatch(batch));
@@ -2118,15 +2113,15 @@ public class TestSchemaEvolution {
           int current = 0;
           for (int r = 0; r < VALUES; ++r) {
             if (current == batch.size) {
-              assertEquals("row " + r, true, rows.nextBatch(batch));
+              assertEquals(true, rows.nextBatch(batch), "row " + r);
               current = 0;
             }
-            assertEquals("row " + r, (timeStrings[r] + " " +
-                                          UTC.getId()).replace(".7 ", " "),
-                longTimestampToString(t1.vector[current], UTC));
-            assertEquals("row " + r, (timeStrings[r] + " " +
-                                          WRITER_ZONE.getId()).replace(".7 ", " "),
-                longTimestampToString(t2.vector[current], WRITER_ZONE));
+            assertEquals(
+                (timeStrings[r] + " " + UTC.getId()).replace(".7 ", " "),
+                longTimestampToString(t1.vector[current], UTC), "row " + r);
+            assertEquals(
+                (timeStrings[r] + " " + WRITER_ZONE.getId()).replace(".7 ", " "),
+                longTimestampToString(t2.vector[current], WRITER_ZONE), "row " + r);
             current += 1;
           }
           assertEquals(false, rows.nextBatch(batch));
@@ -2141,13 +2136,13 @@ public class TestSchemaEvolution {
           int current = 0;
           for (int r = 0; r < VALUES; ++r) {
             if (current == batch.size) {
-              assertEquals("row " + r, true, rows.nextBatch(batch));
+              assertEquals(true, rows.nextBatch(batch), "row " + r);
               current = 0;
             }
-            assertEquals("row " + r, timeStrings[r] + " " + UTC.getId(),
-                decimalTimestampToString(t1.vector[current], UTC));
-            assertEquals("row " + r, timeStrings[r] + " " + WRITER_ZONE.getId(),
-                decimalTimestampToString(t2.vector[current], WRITER_ZONE));
+            assertEquals(timeStrings[r] + " " + UTC.getId(),
+                decimalTimestampToString(t1.vector[current], UTC), "row " + r);
+            assertEquals(timeStrings[r] + " " + WRITER_ZONE.getId(),
+                decimalTimestampToString(t2.vector[current], WRITER_ZONE), "row " + r);
             current += 1;
           }
           assertEquals(false, rows.nextBatch(batch));
@@ -2162,13 +2157,15 @@ public class TestSchemaEvolution {
           int current = 0;
           for (int r = 0; r < VALUES; ++r) {
             if (current == batch.size) {
-              assertEquals("row " + r, true, rows.nextBatch(batch));
+              assertEquals(true, rows.nextBatch(batch), "row " + r);
               current = 0;
             }
-            assertEquals("row " + r, timeStrings[r] + " " + UTC.getId(),
-                doubleTimestampToString(t1.vector[current], UTC));
-            assertEquals("row " + r, timeStrings[r] + " " + WRITER_ZONE.getId(),
-                doubleTimestampToString(t2.vector[current], WRITER_ZONE));
+            assertEquals(timeStrings[r] + " " + UTC.getId(),
+                doubleTimestampToString(t1.vector[current], UTC),
+                "row " + r);
+            assertEquals(timeStrings[r] + " " + WRITER_ZONE.getId(),
+                doubleTimestampToString(t2.vector[current], WRITER_ZONE),
+                "row " + r);
             current += 1;
           }
           assertEquals(false, rows.nextBatch(batch));
@@ -2183,17 +2180,19 @@ public class TestSchemaEvolution {
           int current = 0;
           for (int r = 0; r < VALUES; ++r) {
             if (current == batch.size) {
-              assertEquals("row " + r, true, rows.nextBatch(batch));
+              assertEquals(true, rows.nextBatch(batch), "row " + r);
               current = 0;
             }
             String date = timeStrings[r].substring(0, 10);
-            assertEquals("row " + r, date,
+            assertEquals(date,
                 ConvertTreeReaderFactory.DATE_FORMAT.format(
-                    LocalDate.ofEpochDay(t1.vector[current])));
+                    LocalDate.ofEpochDay(t1.vector[current])),
+                "row " + r);
             // NYC -> UTC still moves forward a day
-            assertEquals("row " + r, date.replace("-27", "-28"),
+            assertEquals(date.replace("-27", "-28"),
                 ConvertTreeReaderFactory.DATE_FORMAT.format(
-                    LocalDate.ofEpochDay(t2.vector[current])));
+                    LocalDate.ofEpochDay(t2.vector[current])),
+                "row " + r);
             current += 1;
           }
           assertEquals(false, rows.nextBatch(batch));
@@ -2208,14 +2207,15 @@ public class TestSchemaEvolution {
           int current = 0;
           for (int r = 0; r < VALUES; ++r) {
             if (current == batch.size) {
-              assertEquals("row " + r, true, rows.nextBatch(batch));
+              assertEquals(true, rows.nextBatch(batch), "row " + r);
               current = 0;
             }
-            assertEquals("row " + r, timeStrings[r], bytesT1.toString(current));
-            assertEquals("row " + r,
+            assertEquals(timeStrings[r], bytesT1.toString(current), "row " + r);
+            assertEquals(
                 timestampToString(Instant.from(WRITER_FORMAT.parse(timeStrings[r])),
                     UTC),
-                bytesT2.toString(current));
+                bytesT2.toString(current),
+                "row " + r);
             current += 1;
           }
           assertEquals(false, rows.nextBatch(batch));
@@ -2230,14 +2230,16 @@ public class TestSchemaEvolution {
           int current = 0;
           for (int r = 0; r < VALUES; ++r) {
             if (current == batch.size) {
-              assertEquals("row " + r, true, rows.nextBatch(batch));
+              assertEquals(true, rows.nextBatch(batch), "row " + r);
               current = 0;
             }
-            assertEquals("row " + r, timeStrings[r] + " UTC",
-                timestampToString(timeT1.time[current], timeT1.nanos[current], UTC));
-            assertEquals("row " + r,
+            assertEquals(timeStrings[r] + " UTC",
+                timestampToString(timeT1.time[current], timeT1.nanos[current], UTC),
+                "row " + r);
+            assertEquals(
                 timestampToString(Instant.from(WRITER_FORMAT.parse(timeStrings[r])), UTC),
-                timestampToString(timeT2.time[current], timeT2.nanos[current], UTC));
+                timestampToString(timeT2.time[current], timeT2.nanos[current], UTC),
+                "row " + r);
             current += 1;
           }
           assertEquals(false, rows.nextBatch(batch));
@@ -2387,7 +2389,7 @@ public class TestSchemaEvolution {
         int current = 0;
         for (int r = 0; r < VALUES; ++r) {
           if (current == batch.size) {
-            assertEquals("row " + r, true, rows.nextBatch(batch));
+            assertEquals(true, rows.nextBatch(batch), "row " + r);
             current = 0;
           }
 
@@ -2397,41 +2399,54 @@ public class TestSchemaEvolution {
           String expectedDate1 = midnight + " " + READER_ZONE.getId();
           String expectedDate2 = midnight + " " + UTC.getId();
 
-          assertEquals("row " + r, expected1.replace(".1 ", " "),
-              timestampToString(l1.time[current], l1.nanos[current], READER_ZONE));
+          String msg = "row " + r;
+          assertEquals(expected1.replace(".1 ", " "),
+              timestampToString(l1.time[current], l1.nanos[current], READER_ZONE),
+              msg);
 
-          assertEquals("row " + r, expected2.replace(".1 ", " "),
-              timestampToString(l2.time[current], l2.nanos[current], WRITER_ZONE));
+          assertEquals(expected2.replace(".1 ", " "),
+              timestampToString(l2.time[current], l2.nanos[current], WRITER_ZONE),
+              msg);
 
-          assertEquals("row " + r, longTimestampToString(((r % 128) - offset), READER_ZONE),
-              timestampToString(t1.time[current], t1.nanos[current], READER_ZONE));
+          assertEquals(longTimestampToString(((r % 128) - offset), READER_ZONE),
+              timestampToString(t1.time[current], t1.nanos[current], READER_ZONE),
+              msg);
 
-          assertEquals("row " + r, longTimestampToString((r % 128), WRITER_ZONE),
-              timestampToString(t2.time[current], t2.nanos[current], WRITER_ZONE));
+          assertEquals(longTimestampToString((r % 128), WRITER_ZONE),
+              timestampToString(t2.time[current], t2.nanos[current], WRITER_ZONE),
+              msg);
 
-          assertEquals("row " + r, expected1,
-              timestampToString(d1.time[current], d1.nanos[current], READER_ZONE));
+          assertEquals(expected1,
+              timestampToString(d1.time[current], d1.nanos[current], READER_ZONE),
+              msg);
 
-          assertEquals("row " + r, expected2,
-              timestampToString(d2.time[current], d2.nanos[current], WRITER_ZONE));
+          assertEquals(expected2,
+              timestampToString(d2.time[current], d2.nanos[current], WRITER_ZONE),
+              msg);
 
-          assertEquals("row " + r, expected1,
-              timestampToString(dbl1.time[current], dbl1.nanos[current], READER_ZONE));
+          assertEquals(expected1,
+              timestampToString(dbl1.time[current], dbl1.nanos[current], READER_ZONE),
+              msg);
 
-          assertEquals("row " + r, expected2,
-              timestampToString(dbl2.time[current], dbl2.nanos[current], WRITER_ZONE));
+          assertEquals(expected2,
+              timestampToString(dbl2.time[current], dbl2.nanos[current], WRITER_ZONE),
+              msg);
 
-          assertEquals("row " + r, expectedDate1,
-              timestampToString(dt1.time[current], dt1.nanos[current], READER_ZONE));
+          assertEquals(expectedDate1,
+              timestampToString(dt1.time[current], dt1.nanos[current], READER_ZONE),
+              msg);
 
-          assertEquals("row " + r, expectedDate2,
-              timestampToString(dt2.time[current], dt2.nanos[current], UTC));
+          assertEquals(expectedDate2,
+              timestampToString(dt2.time[current], dt2.nanos[current], UTC),
+              msg);
 
-          assertEquals("row " + r, expected1,
-              timestampToString(s1.time[current], s1.nanos[current], READER_ZONE));
+          assertEquals(expected1,
+              timestampToString(s1.time[current], s1.nanos[current], READER_ZONE),
+              msg);
 
-          assertEquals("row " + r, expected2,
-              timestampToString(s2.time[current], s2.nanos[current], WRITER_ZONE));
+          assertEquals(expected2,
+              timestampToString(s2.time[current], s2.nanos[current], WRITER_ZONE),
+              msg);
           current += 1;
         }
         assertEquals(false, rows.nextBatch(batch));
@@ -2444,7 +2459,7 @@ public class TestSchemaEvolution {
         int current = 0;
         for (int r = 0; r < VALUES; ++r) {
           if (current == batch.size) {
-            assertEquals("row " + r, true, rows.nextBatch(batch));
+            assertEquals(true, rows.nextBatch(batch), "row " + r);
             current = 0;
           }
 
@@ -2452,36 +2467,46 @@ public class TestSchemaEvolution {
           String expected2 = timeStrings[r] + " " + WRITER_ZONE.getId();
           String midnight = timeStrings[r].substring(0, 10) + " 00:00:00";
           String expectedDate = midnight + " " + UTC.getId();
-
-          assertEquals("row " + r, expected1.replace(".1 ", " "),
-              timestampToString(l1.time[current], l1.nanos[current], UTC));
-
-          assertEquals("row " + r, expected2.replace(".1 ", " "),
-              timestampToString(l2.time[current], l2.nanos[current], WRITER_ZONE));
-
-          assertEquals("row " + r, expected1,
-              timestampToString(d1.time[current], d1.nanos[current], UTC));
-
-          assertEquals("row " + r, expected2,
-              timestampToString(d2.time[current], d2.nanos[current], WRITER_ZONE));
-
-          assertEquals("row " + r, expected1,
-              timestampToString(dbl1.time[current], dbl1.nanos[current], UTC));
-
-          assertEquals("row " + r, expected2,
-              timestampToString(dbl2.time[current], dbl2.nanos[current], WRITER_ZONE));
-
-          assertEquals("row " + r, expectedDate,
-              timestampToString(dt1.time[current], dt1.nanos[current], UTC));
-
-          assertEquals("row " + r, expectedDate,
-              timestampToString(dt2.time[current], dt2.nanos[current], UTC));
-
-          assertEquals("row " + r, expected1,
-              timestampToString(s1.time[current], s1.nanos[current], UTC));
-
-          assertEquals("row " + r, expected2,
-              timestampToString(s2.time[current], s2.nanos[current], WRITER_ZONE));
+          String msg = "row " + r;
+          assertEquals(expected1.replace(".1 ", " "),
+              timestampToString(l1.time[current], l1.nanos[current], UTC),
+              msg);
+
+          assertEquals(expected2.replace(".1 ", " "),
+              timestampToString(l2.time[current], l2.nanos[current], WRITER_ZONE),
+              msg);
+
+          assertEquals(expected1,
+              timestampToString(d1.time[current], d1.nanos[current], UTC),
+              msg);
+
+          assertEquals(expected2,
+              timestampToString(d2.time[current], d2.nanos[current], WRITER_ZONE),
+              msg);
+
+          assertEquals(expected1,
+              timestampToString(dbl1.time[current], dbl1.nanos[current], UTC),
+              msg);
+
+          assertEquals(expected2,
+              timestampToString(dbl2.time[current], dbl2.nanos[current], WRITER_ZONE),
+              msg);
+
+          assertEquals(expectedDate,
+              timestampToString(dt1.time[current], dt1.nanos[current], UTC),
+              msg);
+
+          assertEquals(expectedDate,
+              timestampToString(dt2.time[current], dt2.nanos[current], UTC),
+              msg);
+
+          assertEquals(expected1,
+              timestampToString(s1.time[current], s1.nanos[current], UTC),
+              msg);
+
+          assertEquals(expected2,
+              timestampToString(s2.time[current], s2.nanos[current], WRITER_ZONE),
+              msg);
           current += 1;
         }
         assertEquals(false, rows.nextBatch(batch));
@@ -2565,7 +2590,7 @@ public class TestSchemaEvolution {
          RecordReader rows = reader.rows(reader.options().schema(readerSchema))) {
       int value = 0;
       while (value < values.length) {
-        assertTrue("value " + value, rows.nextBatch(batchTimeStamp));
+        assertTrue(rows.nextBatch(batchTimeStamp), "value " + value);
         for(int row=0; row < batchTimeStamp.size; ++row) {
           double expected = values[value + row];
           String rowName = String.format("value %d", value + row);
@@ -2573,18 +2598,18 @@ public class TestSchemaEvolution {
           if (expected * 1000 < Long.MIN_VALUE ||
                   expected * 1000 > Long.MAX_VALUE ||
                   ((expected >= 0) != isPositive)) {
-            assertFalse(rowName, t1.noNulls);
-            assertTrue(rowName, t1.isNull[row]);
+            assertFalse(t1.noNulls, rowName);
+            assertTrue(t1.isNull[row], rowName);
           } else {
             double actual = Math.floorDiv(t1.time[row], 1000) +
                                 t1.nanos[row] / 1_000_000_000.0;
-            assertEquals(rowName, expected, actual,
-                Math.abs(expected * (isFloat ? 0.000001 : 0.0000000000000001)));
-            assertFalse(rowName, t1.isNull[row]);
-            assertTrue(String.format(
-                "%s nanos should be 0 to 1,000,000,000 instead it's: %d",
-                rowName, t1.nanos[row]),
-                t1.nanos[row] >= 0 && t1.nanos[row] < 1_000_000_000);
+            assertEquals(expected, actual,
+                Math.abs(expected * (isFloat ? 0.000001 : 0.0000000000000001)), rowName);
+            assertFalse(t1.isNull[row], rowName);
+            assertTrue(t1.nanos[row] >= 0 && t1.nanos[row] < 1_000_000_000,
+                String.format(
+                    "%s nanos should be 0 to 1,000,000,000 instead it's: %d",
+                    rowName, t1.nanos[row]));
           }
         }
         value += batchTimeStamp.size;
@@ -2607,8 +2632,8 @@ public class TestSchemaEvolution {
     SchemaEvolution evoCc = new SchemaEvolution(typeCamelCaseColumns, null, options);
     SchemaEvolution evoLc = new SchemaEvolution(typeLowerCaseColumns, null, options);
 
-    assertTrue("Schema (" + ccSchema +") was found to be non-acid ", evoCc.isAcid());
-    assertTrue("Schema (" + lcSchema +") was found to be non-acid ", evoLc.isAcid());
+    assertTrue(evoCc.isAcid(), "Schema (" + ccSchema +") was found to be non-acid ");
+    assertTrue(evoLc.isAcid(), "Schema (" + lcSchema +") was found to be non-acid ");
   }
 
   @Test
@@ -2621,13 +2646,13 @@ public class TestSchemaEvolution {
     TypeDescription readerSchema = TypeDescription.fromString(acidSchema);
     SchemaEvolution schemaEvolution = new SchemaEvolution(fileSchema, readerSchema, options);
 
-    assertEquals(String.format("Reader schema %s is not acid", schemaEvolution.getReaderSchema().toString()),
-        acidSchema, schemaEvolution.getReaderSchema().toString());
+    assertEquals(acidSchema, schemaEvolution.getReaderSchema().toString(),
+        String.format("Reader schema %s is not acid", schemaEvolution.getReaderSchema().toString()));
 
     String notAcidSchema ="struct<a:int,b:int>";
     readerSchema = TypeDescription.fromString(notAcidSchema);
     schemaEvolution = new SchemaEvolution(fileSchema, readerSchema, options);
-    assertEquals(String.format("Reader schema %s is not acid", schemaEvolution.getReaderSchema().toString()),
-        acidSchema, schemaEvolution.getReaderSchema().toString());
+    assertEquals(acidSchema, schemaEvolution.getReaderSchema().toString(),
+        String.format("Reader schema %s is not acid", schemaEvolution.getReaderSchema().toString()));
   }
 }
diff --git a/java/core/src/test/org/apache/orc/impl/TestSerializationUtils.java b/java/core/src/test/org/apache/orc/impl/TestSerializationUtils.java
index 5b06622..621ea77 100644
--- a/java/core/src/test/org/apache/orc/impl/TestSerializationUtils.java
+++ b/java/core/src/test/org/apache/orc/impl/TestSerializationUtils.java
@@ -17,9 +17,10 @@
  */
 package org.apache.orc.impl;
 
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -27,8 +28,6 @@ import java.io.InputStream;
 import java.math.BigInteger;
 import java.util.Random;
 
-import org.junit.Test;
-
 import com.google.common.math.LongMath;
 
 public class TestSerializationUtils {
@@ -80,10 +79,10 @@ public class TestSerializationUtils {
     for(int i=-8192; i < 8192; ++i) {
       buffer.reset();
         SerializationUtils.writeBigInteger(buffer, BigInteger.valueOf(i));
-      assertEquals("compare length for " + i,
-            i >= -64 && i < 64 ? 1 : 2, buffer.size());
-      assertEquals("compare result for " + i,
-          i, SerializationUtils.readBigInteger(fromBuffer(buffer)).intValue());
+      assertEquals(i >= -64 && i < 64 ? 1 : 2, buffer.size(),
+          "compare length for " + i);
+      assertEquals(i, SerializationUtils.readBigInteger(fromBuffer(buffer)).intValue(),
+          "compare result for " + i);
     }
     buffer.reset();
     SerializationUtils.writeBigInteger(buffer,
diff --git a/java/core/src/test/org/apache/orc/impl/TestStreamName.java b/java/core/src/test/org/apache/orc/impl/TestStreamName.java
index 8ed6ac2..7f2aa8d 100644
--- a/java/core/src/test/org/apache/orc/impl/TestStreamName.java
+++ b/java/core/src/test/org/apache/orc/impl/TestStreamName.java
@@ -19,9 +19,9 @@
 package org.apache.orc.impl;
 
 import org.apache.orc.OrcProto;
-import org.junit.Test;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class TestStreamName {
 
@@ -38,7 +38,7 @@ public class TestStreamName {
     assertEquals(false, s1.equals(s2));
     assertEquals(false, s1.equals(s3));
     assertEquals(true, s1.equals(s1p));
-    assertEquals(false, s1.equals(null));
+    assertFalse(s1.equals(null));
     assertEquals(true, s1.compareTo(s2) < 0);
     assertEquals(true, s2.compareTo(s3) < 0);
     assertEquals(true, s3.compareTo(s4) < 0);
diff --git a/java/core/src/test/org/apache/orc/impl/TestStringHashTableDictionary.java b/java/core/src/test/org/apache/orc/impl/TestStringHashTableDictionary.java
index 005e378..42df1d6 100644
--- a/java/core/src/test/org/apache/orc/impl/TestStringHashTableDictionary.java
+++ b/java/core/src/test/org/apache/orc/impl/TestStringHashTableDictionary.java
@@ -24,9 +24,8 @@ import java.util.stream.Stream;
 
 import org.apache.hadoop.io.Text;
 import org.apache.orc.StringDictTestingUtils;
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
-
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class TestStringHashTableDictionary {
 
diff --git a/java/core/src/test/org/apache/orc/impl/TestStringRedBlackTree.java b/java/core/src/test/org/apache/orc/impl/TestStringRedBlackTree.java
index f5eca83..a6cf655 100644
--- a/java/core/src/test/org/apache/orc/impl/TestStringRedBlackTree.java
+++ b/java/core/src/test/org/apache/orc/impl/TestStringRedBlackTree.java
@@ -18,13 +18,13 @@
 
 package org.apache.orc.impl;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import java.io.IOException;
 
 import org.apache.hadoop.io.IntWritable;
 import org.apache.orc.StringDictTestingUtils;
-import org.junit.Test;
 
 /**
  * Test the red-black tree with string keys.
diff --git a/java/core/src/test/org/apache/orc/impl/TestWriterImpl.java b/java/core/src/test/org/apache/orc/impl/TestWriterImpl.java
index 41c1ab6..5192374 100644
--- a/java/core/src/test/org/apache/orc/impl/TestWriterImpl.java
+++ b/java/core/src/test/org/apache/orc/impl/TestWriterImpl.java
@@ -29,14 +29,12 @@ import org.apache.orc.OrcFile;
 import org.apache.orc.Reader;
 import org.apache.orc.TypeDescription;
 import org.apache.orc.Writer;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
 
 import java.io.IOException;
 import java.util.Collections;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class TestWriterImpl {
 
@@ -46,7 +44,7 @@ public class TestWriterImpl {
   Path testFilePath;
   TypeDescription schema;
 
-  @Before
+  @BeforeEach
   public void openFileSystem() throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
@@ -56,16 +54,18 @@ public class TestWriterImpl {
     schema = TypeDescription.fromString("struct<x:int,y:int>");
   }
 
-  @After
+  @AfterEach
   public void deleteTestFile() throws Exception {
     fs.delete(testFilePath, false);
   }
 
-  @Test(expected = IOException.class)
+  @Test
   public void testDefaultOverwriteFlagForWriter() throws Exception {
-    // default value of the overwrite flag is false, so this should fail
-    Writer w = OrcFile.createWriter(testFilePath, OrcFile.writerOptions(conf).setSchema(schema));
-    w.close();
+    assertThrows(IOException.class, () -> {
+      // default value of the overwrite flag is false, so this should fail
+      Writer w = OrcFile.createWriter(testFilePath, OrcFile.writerOptions(conf).setSchema(schema));
+      w.close();
+    });
   }
 
   @Test
diff --git a/java/core/src/test/org/apache/orc/impl/TestZlib.java b/java/core/src/test/org/apache/orc/impl/TestZlib.java
index 88fad79..c4fa97b 100644
--- a/java/core/src/test/org/apache/orc/impl/TestZlib.java
+++ b/java/core/src/test/org/apache/orc/impl/TestZlib.java
@@ -19,13 +19,13 @@
 package org.apache.orc.impl;
 
 import org.apache.orc.CompressionCodec;
-import org.junit.Test;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
 
 public class TestZlib {
 
diff --git a/java/core/src/test/org/apache/orc/impl/TestZstd.java b/java/core/src/test/org/apache/orc/impl/TestZstd.java
index 8a6bbea..f69e390 100644
--- a/java/core/src/test/org/apache/orc/impl/TestZstd.java
+++ b/java/core/src/test/org/apache/orc/impl/TestZstd.java
@@ -22,11 +22,11 @@ import io.airlift.compress.zstd.ZstdCompressor;
 import io.airlift.compress.zstd.ZstdDecompressor;
 import org.apache.orc.CompressionCodec;
 import org.apache.orc.CompressionKind;
-import org.junit.Test;
 
 import java.nio.ByteBuffer;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestZstd {
 
diff --git a/java/core/src/test/org/apache/orc/impl/mask/TestDataMask.java b/java/core/src/test/org/apache/orc/impl/mask/TestDataMask.java
index 070e319..aa7f499 100644
--- a/java/core/src/test/org/apache/orc/impl/mask/TestDataMask.java
+++ b/java/core/src/test/org/apache/orc/impl/mask/TestDataMask.java
@@ -29,11 +29,11 @@ import org.apache.hadoop.hive.ql.exec.vector.UnionColumnVector;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.orc.DataMask;
 import org.apache.orc.TypeDescription;
-import org.junit.Test;
 
 import java.nio.charset.StandardCharsets;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestDataMask {
 
@@ -155,20 +155,22 @@ public class TestDataMask {
 
     // check the outputs
     for(int i=0; i < 3; ++i) {
-      assertEquals("iter " + i, (i + 1) + "." + (i + 1), a.vector[i].toString());
-      assertEquals("iter " + i, 1.25 * (i + 1), b.vector[i], 0.0001);
-      assertEquals("iter " + i, i == 0 ? 0 : c.offsets[i-1] + c.lengths[i-1], c.offsets[i]);
-      assertEquals("iter " + i, 2 * i, c.lengths[i]);
-      assertEquals("iter " + i, i == 0 ? 4 : d.offsets[i-1] - d.lengths[i], d.offsets[i]);
-      assertEquals("iter " + i, 2, d.lengths[i]);
-      assertEquals("iter " + i, i % 2, e.tags[i]);
-      assertEquals("iter " + i, Integer.toHexString(0x123 * i), f.toString(i));
+      String msg = "iter " + i;
+      assertEquals((i + 1) + "." + (i + 1), a.vector[i].toString(), msg);
+      assertEquals(1.25 * (i + 1), b.vector[i], 0.0001, msg);
+      assertEquals(i == 0 ? 0 : c.offsets[i-1] + c.lengths[i-1], c.offsets[i], msg);
+      assertEquals(2 * i, c.lengths[i], msg);
+      assertEquals(i == 0 ? 4 : d.offsets[i-1] - d.lengths[i], d.offsets[i], msg);
+      assertEquals(2, d.lengths[i], msg);
+      assertEquals(i % 2, e.tags[i], msg);
+      assertEquals(Integer.toHexString(0x123 * i), f.toString(i), msg);
     }
     // check the subvalues for the list and map
     for(int i=0; i < 6; ++i) {
-      assertEquals("iter " + i, i, ce.vector[i]);
-      assertEquals("iter " + i, i * 1111, dk.time[i]);
-      assertEquals("iter " + i, i * 11, dv.vector[i]);
+      String msg = "iter " + i;
+      assertEquals(i, ce.vector[i], msg);
+      assertEquals(i * 1111, dk.time[i], msg);
+      assertEquals(i * 11, dv.vector[i], msg);
     }
     assertEquals(0, e1.vector[0]);
     assertEquals(20, e1.vector[2]);
diff --git a/java/core/src/test/org/apache/orc/impl/mask/TestRedactMask.java b/java/core/src/test/org/apache/orc/impl/mask/TestRedactMask.java
index ff4ef35..5da8ba1 100644
--- a/java/core/src/test/org/apache/orc/impl/mask/TestRedactMask.java
+++ b/java/core/src/test/org/apache/orc/impl/mask/TestRedactMask.java
@@ -20,13 +20,13 @@ package org.apache.orc.impl.mask;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.junit.Test;
 
 import java.nio.charset.StandardCharsets;
 import java.sql.Date;
 import java.sql.Timestamp;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestRedactMask {
 
@@ -59,12 +59,12 @@ public class TestRedactMask {
         if (i == 18 && digit == 9) {
           expected = 999_999_999_999_999_999L;
         }
-        assertEquals("digit " + digit + " value " + input, expected,
-            mask.maskLong(input));
-        assertEquals("digit " + digit + " value " + (5 * input), expected,
-            mask.maskLong(5 * input));
-        assertEquals("digit " + digit + " value " + (9 * input), expected,
-            mask.maskLong(9 * input));
+        assertEquals(expected, mask.maskLong(input),
+            "digit " + digit + " value " + input);
+        assertEquals(expected, mask.maskLong(5 * input),
+            "digit " + digit + " value " + (5 * input));
+        assertEquals(expected, mask.maskLong(9 * input),
+            "digit " + digit + " value " + (9 * input));
         expected = expected * 10 + digit;
         input *= 10;
       }
@@ -235,9 +235,9 @@ public class TestRedactMask {
         " \uD863\uDCCA\uD863\uDCCA\uD863\uDCCA\uD863\uDCCA" +
         "\uD863\uDCCA\uD863\uDCCA\uD863\uDCCA\uD863\uDCCA.";
     for(int r=0; r < 1024; ++r) {
-      assertEquals("r = " + r, expected,
+      assertEquals(expected,
           new String(target.vector[r], target.start[r], target.length[r],
-              StandardCharsets.UTF_8));
+              StandardCharsets.UTF_8), "r = " + r);
     }
 
     // Make sure that the target keeps the larger output buffer.
diff --git a/java/core/src/test/org/apache/orc/impl/mask/TestSHA256Mask.java b/java/core/src/test/org/apache/orc/impl/mask/TestSHA256Mask.java
index f63b79c..0dce8d6 100644
--- a/java/core/src/test/org/apache/orc/impl/mask/TestSHA256Mask.java
+++ b/java/core/src/test/org/apache/orc/impl/mask/TestSHA256Mask.java
@@ -18,7 +18,6 @@ package org.apache.orc.impl.mask;
 
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.orc.TypeDescription;
-import org.junit.Test;
 
 import javax.xml.bind.DatatypeConverter;
 import java.nio.ByteBuffer;
@@ -26,8 +25,9 @@ import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class TestSHA256Mask {
 
diff --git a/java/core/src/test/org/apache/orc/impl/mask/TestUnmaskRange.java b/java/core/src/test/org/apache/orc/impl/mask/TestUnmaskRange.java
index 02269b6..ddef12b 100644
--- a/java/core/src/test/org/apache/orc/impl/mask/TestUnmaskRange.java
+++ b/java/core/src/test/org/apache/orc/impl/mask/TestUnmaskRange.java
@@ -19,11 +19,11 @@ package org.apache.orc.impl.mask;
 
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.junit.Test;
 
 import java.nio.charset.StandardCharsets;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
  * Test Unmask option
diff --git a/java/core/src/test/org/apache/orc/impl/reader/TestReaderEncryptionVariant.java b/java/core/src/test/org/apache/orc/impl/reader/TestReaderEncryptionVariant.java
index 14579f8..7a81bc4 100644
--- a/java/core/src/test/org/apache/orc/impl/reader/TestReaderEncryptionVariant.java
+++ b/java/core/src/test/org/apache/orc/impl/reader/TestReaderEncryptionVariant.java
@@ -21,13 +21,13 @@ package org.apache.orc.impl.reader;
 import org.apache.orc.EncryptionAlgorithm;
 import org.apache.orc.OrcProto;
 import org.apache.orc.StripeInformation;
-import org.junit.Test;
 
 import java.io.IOException;
 import java.util.List;
 import java.util.ArrayList;
 
-import static org.junit.Assert.assertNull;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertNull;
 
 public class TestReaderEncryptionVariant {
 
diff --git a/java/core/src/test/org/apache/orc/util/TestBloomFilter.java b/java/core/src/test/org/apache/orc/util/TestBloomFilter.java
index 8ce99e2..80a5c55 100644
--- a/java/core/src/test/org/apache/orc/util/TestBloomFilter.java
+++ b/java/core/src/test/org/apache/orc/util/TestBloomFilter.java
@@ -22,10 +22,10 @@ import com.google.protobuf.ByteString;
 import org.apache.orc.OrcFile;
 import org.apache.orc.OrcProto;
 import org.apache.orc.TypeDescription;
-import org.junit.Test;
 
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
 
 /**
  * Tests for BloomFilter
diff --git a/java/core/src/test/org/apache/orc/util/TestMurmur3.java b/java/core/src/test/org/apache/orc/util/TestMurmur3.java
index 13ddb9f..f59d130 100644
--- a/java/core/src/test/org/apache/orc/util/TestMurmur3.java
+++ b/java/core/src/test/org/apache/orc/util/TestMurmur3.java
@@ -18,7 +18,8 @@
 
 package org.apache.orc.util;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
 
 import com.google.common.hash.HashFunction;
 import com.google.common.hash.Hashing;
@@ -27,7 +28,6 @@ import java.nio.ByteOrder;
 import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Random;
-import org.junit.Test;
 
 /**
  * Tests for Murmur3 variants.
diff --git a/java/core/src/test/org/apache/orc/util/TestOrcUtils.java b/java/core/src/test/org/apache/orc/util/TestOrcUtils.java
index 5e2a4db..4e269c9 100644
--- a/java/core/src/test/org/apache/orc/util/TestOrcUtils.java
+++ b/java/core/src/test/org/apache/orc/util/TestOrcUtils.java
@@ -23,8 +23,8 @@ import java.util.Arrays;
 import org.apache.orc.OrcUtils;
 import org.apache.orc.TypeDescription;
 
-import static org.junit.Assert.assertTrue;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  * Tests for OrcUtils.
diff --git a/java/core/src/test/org/apache/orc/util/TestStreamWrapperFileSystem.java b/java/core/src/test/org/apache/orc/util/TestStreamWrapperFileSystem.java
index 00e7a59..989c68d 100644
--- a/java/core/src/test/org/apache/orc/util/TestStreamWrapperFileSystem.java
+++ b/java/core/src/test/org/apache/orc/util/TestStreamWrapperFileSystem.java
@@ -29,13 +29,13 @@ import org.apache.orc.Reader;
 import org.apache.orc.RecordReader;
 import org.apache.orc.TestVectorOrcFile;
 import org.apache.orc.TypeDescription;
-import org.junit.Test;
 
 import java.io.IOException;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertSame;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertSame;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.Test;
 
 /**
  * Tests for StreamWrapperFileSystem.
@@ -75,10 +75,10 @@ public class TestStreamWrapperFileSystem {
       int current = 0;
       for(int r=0; r < 7500; ++r) {
         if (current >= batch.size) {
-          assertTrue("row " + r, rows.nextBatch(batch));
+          assertTrue(rows.nextBatch(batch), "row " + r);
           current = 0;
         }
-        assertEquals("row " + r, r % 2, boolean1.vector[current++]);
+        assertEquals(r % 2, boolean1.vector[current++], "row " + r);
       }
     }
   }
diff --git a/java/pom.xml b/java/pom.xml
index b2b6141..c8c872d 100644
--- a/java/pom.xml
+++ b/java/pom.xml
@@ -775,26 +775,20 @@
       </dependency>
       <dependency>
         <groupId>org.junit.jupiter</groupId>
-        <artifactId>junit-jupiter-engine</artifactId>
-        <version>5.7.0</version>
-        <scope>test</scope>
-      </dependency>
-      <dependency>
-        <groupId>org.junit.jupiter</groupId>
         <artifactId>junit-jupiter-params</artifactId>
         <version>5.7.0</version>
         <scope>test</scope>
       </dependency>
       <dependency>
-        <groupId>org.junit.vintage</groupId>
-        <artifactId>junit-vintage-engine</artifactId>
-        <version>5.7.0</version>
+        <groupId>org.mockito</groupId>
+        <artifactId>mockito-core</artifactId>
+        <version>3.7.0</version>
         <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>org.mockito</groupId>
-        <artifactId>mockito-core</artifactId>
-        <version>3.7.0</version>
+        <artifactId>mockito-junit-jupiter</artifactId>
+        <version>3.11.2</version>
         <scope>test</scope>
       </dependency>
       <dependency>
diff --git a/java/shims/pom.xml b/java/shims/pom.xml
index 498b54a..7afa0c2 100644
--- a/java/shims/pom.xml
+++ b/java/shims/pom.xml
@@ -56,7 +56,7 @@
     <!-- test inter-project -->
     <dependency>
       <groupId>org.junit.jupiter</groupId>
-      <artifactId>junit-jupiter-engine</artifactId>
+      <artifactId>junit-jupiter-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>

[orc] 01/02: ORC-903: Migrate TestVectorOrcFile to JUnit5 (#809)

Posted by do...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-1.7
in repository https://gitbox.apache.org/repos/asf/orc.git

commit 33fdd0934213fe64c4176d692cbb44fe6fdbaa9c
Author: Dongjoon Hyun <do...@apache.org>
AuthorDate: Tue Aug 3 21:05:04 2021 -0700

    ORC-903: Migrate TestVectorOrcFile to JUnit5 (#809)
    
    ### What changes were proposed in this pull request?
    
    This PR aims to migrate `TestVectorOrcFile` to JUnit5.
    
    ### Why are the changes needed?
    
    `TestVectorOrcFile` is one of the largest test files in ORC, at 4768 lines.
    This is the first step in migrating the `core` module; the remaining test files are smaller than this one.
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    **BEFORE**
    ```
    $ mvn test -pl core -Dtest=org.apache.orc.TestVectorOrcFile
    [WARNING] Tests run: 147, Failures: 0, Errors: 0, Skipped: 18, Time elapsed: 6.669 s - in org.apache.orc.TestVectorOrcFile
    ```
    
    **AFTER**
    ```
    $ mvn test -pl core -Dtest=org.apache.orc.TestVectorOrcFile
    [WARNING] Tests run: 147, Failures: 0, Errors: 0, Skipped: 18, Time elapsed: 6.844 s - in org.apache.orc.TestVectorOrcFile
    ```
    
    (cherry picked from commit f181a1c6d62ed3f3ae43517c13011458cb0dc14a)
    Signed-off-by: Dongjoon Hyun <do...@apache.org>
---
 java/core/pom.xml                                  |   5 +
 .../src/test/org/apache/orc/TestVectorOrcFile.java | 765 +++++++++++----------
 2 files changed, 408 insertions(+), 362 deletions(-)

diff --git a/java/core/pom.xml b/java/core/pom.xml
index 3213072..0f30363 100644
--- a/java/core/pom.xml
+++ b/java/core/pom.xml
@@ -92,6 +92,11 @@
       <scope>test</scope>
     </dependency>
     <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-params</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>org.junit.vintage</groupId>
       <artifactId>junit-vintage-engine</artifactId>
       <scope>test</scope>
diff --git a/java/core/src/test/org/apache/orc/TestVectorOrcFile.java b/java/core/src/test/org/apache/orc/TestVectorOrcFile.java
index 1c4ba26..31be42f 100644
--- a/java/core/src/test/org/apache/orc/TestVectorOrcFile.java
+++ b/java/core/src/test/org/apache/orc/TestVectorOrcFile.java
@@ -25,6 +25,7 @@ import org.apache.orc.impl.OrcCodecPool;
 
 import org.apache.orc.impl.WriterImpl;
 
+import org.apache.orc.OrcFile.Version;
 import org.apache.orc.OrcFile.WriterOptions;
 
 import com.google.common.collect.Lists;
@@ -58,13 +59,13 @@ import org.apache.orc.impl.DataReaderProperties;
 import org.apache.orc.impl.OrcIndex;
 import org.apache.orc.impl.RecordReaderImpl;
 import org.apache.orc.impl.RecordReaderUtils;
-import org.junit.Assume;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.*;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assumptions.*;
+
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
 import org.mockito.Mockito;
 
 import javax.xml.bind.DatatypeConverter;
@@ -88,32 +89,21 @@ import java.util.List;
 import java.util.Map;
 import java.util.Random;
 import java.util.TimeZone;
+import java.util.UUID;
 import java.util.function.IntFunction;
-
-import static junit.framework.TestCase.assertNotNull;
-import static org.junit.Assert.*;
+import java.util.stream.Stream;
 
 /**
  * Tests for the vectorized reader and writer for ORC files.
  */
-@RunWith(Parameterized.class)
 public class TestVectorOrcFile {
 
-  @Parameterized.Parameter
-  public OrcFile.Version fileFormat;
-
-  @Parameterized.Parameters
-  public static Collection<Object[]> getParameters() {
-    OrcFile.Version[] params = new OrcFile.Version[]{
-        OrcFile.Version.V_0_11,
-        OrcFile.Version.V_0_12,
-        OrcFile.Version.UNSTABLE_PRE_2_0};
-
-    List<Object[]> result = new ArrayList<>();
-    for(OrcFile.Version v: params) {
-      result.add(new Object[]{v});
-    }
-    return result;
+  private static Stream<Arguments> data() {
+    return Stream.of(
+        Arguments.of(Version.V_0_11),
+        Arguments.of(Version.V_0_12),
+        Arguments.of(Version.UNSTABLE_PRE_2_0)
+    );
   }
 
   public static String getFileFromClasspath(String name) {
@@ -201,22 +191,20 @@ public class TestVectorOrcFile {
   FileSystem fs;
   Path testFilePath;
 
-  @Rule
-  public TestName testCaseName = new TestName();
-
-  @Before
-  public void openFileSystem () throws Exception {
+  @BeforeEach
+  public void openFileSystem(TestInfo testInfo) throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
     testFilePath = new Path(workDir, "TestVectorOrcFile." +
-        testCaseName.getMethodName().replaceFirst("\\[[0-9]+\\]", "")
-        + "." + fileFormat.getName() + ".orc");
+        testInfo.getTestMethod().get().getName().replaceFirst("\\[[0-9]+\\]", "")
+        + "." + UUID.randomUUID() + ".orc");
     fs.delete(testFilePath, false);
   }
 
-  @Test
-  public void testReadFormat_0_11() throws Exception {
-    Assume.assumeTrue(fileFormat == OrcFile.Version.V_0_11);
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testReadFormat_0_11(Version fileFormat) throws Exception {
+    assumeTrue(fileFormat == Version.V_0_11);
     Path oldFilePath =
         new Path(getFileFromClasspath("orc-file-11-format.orc"));
     Reader reader = OrcFile.createReader(oldFilePath,
@@ -380,8 +368,9 @@ public class TestVectorOrcFile {
     rows.close();
   }
 
-  @Test
-  public void testTimestampBug() throws IOException {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testTimestampBug(Version fileFormat) throws IOException {
     TypeDescription schema = TypeDescription.createTimestamp();
     Writer writer = OrcFile.createWriter(testFilePath,
         OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000)
@@ -436,8 +425,9 @@ public class TestVectorOrcFile {
     }
   }
 
-  @Test
-  public void testTimestamp() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testTimestamp(Version fileFormat) throws Exception {
     TypeDescription schema = TypeDescription.createTimestamp();
     Writer writer = OrcFile.createWriter(testFilePath,
         OrcFile.writerOptions(conf).setSchema(schema).stripeSize(100000)
@@ -487,8 +477,9 @@ public class TestVectorOrcFile {
     assertEquals(true, Arrays.equals(expected, included));
   }
 
-  @Test
-  public void testStringAndBinaryStatistics() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testStringAndBinaryStatistics(Version fileFormat) throws Exception {
 
     TypeDescription schema = TypeDescription.createStruct()
         .addField("bytes1", TypeDescription.createBinary())
@@ -581,8 +572,9 @@ public class TestVectorOrcFile {
     rows.close();
   }
 
-  @Test
-  public void testHiveDecimalStatsAllNulls() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testHiveDecimalStatsAllNulls(Version fileFormat) throws Exception {
     TypeDescription schema = TypeDescription.createStruct()
       .addField("dec1", TypeDescription.createDecimal());
 
@@ -612,8 +604,9 @@ public class TestVectorOrcFile {
     assertEquals(new HiveDecimalWritable(0).getHiveDecimal(), ((DecimalColumnStatistics)stats[1]).getSum());
   }
 
-  @Test
-  public void testStripeLevelStats() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testStripeLevelStats(Version fileFormat) throws Exception {
     TypeDescription schema =
         TypeDescription.fromString("struct<int1:int,string1:string>");
     Writer writer = OrcFile.createWriter(testFilePath,
@@ -710,8 +703,9 @@ public class TestVectorOrcFile {
         items.get(0).getStatistics().getIntStatistics().getMaximum());
   }
 
-  @Test
-  public void testStripeLevelStatsNoForce() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testStripeLevelStatsNoForce(Version fileFormat) throws Exception {
     TypeDescription schema =
         TypeDescription.fromString("struct<int1:int,string1:string>");
     Writer writer = OrcFile.createWriter(testFilePath,
@@ -809,14 +803,16 @@ public class TestVectorOrcFile {
 
   private static void checkInner(StructColumnVector inner, int rowId,
                                  int rowInBatch, int i, String value) {
-    assertEquals("row " + rowId, i,
-        ((LongColumnVector) inner.fields[0]).vector[rowInBatch]);
+    assertEquals(i,
+        ((LongColumnVector) inner.fields[0]).vector[rowInBatch],
+        "row " + rowId);
     if (value != null) {
-      assertEquals("row " + rowId, value,
-          ((BytesColumnVector) inner.fields[1]).toString(rowInBatch));
+      assertEquals(value,
+          ((BytesColumnVector) inner.fields[1]).toString(rowInBatch),
+          "row " + rowId);
     } else {
-      assertEquals("row " + rowId, true, inner.fields[1].isNull[rowInBatch]);
-      assertEquals("row " + rowId, false, inner.fields[1].noNulls);
+      assertEquals(true, inner.fields[1].isNull[rowInBatch], "row " + rowId);
+      assertEquals(false, inner.fields[1].noNulls, "row " + rowId);
     }
   }
 
@@ -843,7 +839,7 @@ public class TestVectorOrcFile {
   private static void checkInnerList(ListColumnVector list, int rowId,
                                      int rowInBatch, List<InnerStruct> value) {
     if (value != null) {
-      assertEquals("row " + rowId, value.size(), list.lengths[rowInBatch]);
+      assertEquals(value.size(), list.lengths[rowInBatch], "row " + rowId);
       int start = (int) list.offsets[rowInBatch];
       for (int i = 0; i < list.lengths[rowInBatch]; ++i) {
         InnerStruct inner = value.get(i);
@@ -852,8 +848,8 @@ public class TestVectorOrcFile {
       }
       list.childCount += value.size();
     } else {
-      assertEquals("row " + rowId, true, list.isNull[rowInBatch]);
-      assertEquals("row " + rowId, false, list.noNulls);
+      assertEquals(true, list.isNull[rowInBatch], "row " + rowId);
+      assertEquals(false, list.noNulls, "row " + rowId);
     }
   }
 
@@ -886,7 +882,7 @@ public class TestVectorOrcFile {
                                     int rowInBatch,
                                     Map<String, InnerStruct> value) {
     if (value != null) {
-      assertEquals("row " + rowId, value.size(), map.lengths[rowInBatch]);
+      assertEquals(value.size(), map.lengths[rowInBatch], "row " + rowId);
       int offset = (int) map.offsets[rowInBatch];
       for(int i=0; i < value.size(); ++i) {
         String key = ((BytesColumnVector) map.keys).toString(offset + i);
@@ -895,8 +891,8 @@ public class TestVectorOrcFile {
             expected.int1, expected.string1.toString());
       }
     } else {
-      assertEquals("row " + rowId, true, map.isNull[rowId]);
-      assertEquals("row " + rowId, false, map.noNulls);
+      assertEquals(true, map.isNull[rowId], "row " + rowId);
+      assertEquals(false, map.noNulls, "row " + rowId);
     }
   }
 
@@ -916,8 +912,8 @@ public class TestVectorOrcFile {
       checkInnerList((ListColumnVector) middle.fields[0], rowId, rowInBatch,
           value.list);
     } else {
-      assertEquals("row " + rowId, true, middle.isNull[rowInBatch]);
-      assertEquals("row " + rowId, false, middle.noNulls);
+      assertEquals(true, middle.isNull[rowInBatch], "row " + rowId);
+      assertEquals(false, middle.noNulls, "row " + rowId);
     }
   }
 
@@ -960,29 +956,31 @@ public class TestVectorOrcFile {
                                   double d1, BytesWritable b3, String s2,
                                   MiddleStruct m1, List<InnerStruct> l2,
                                   Map<String, InnerStruct> m2) {
-    assertEquals("row " + rowId, b1, getBoolean(batch, rowInBatch));
-    assertEquals("row " + rowId, b2, getByte(batch, rowInBatch));
-    assertEquals("row " + rowId, s1, getShort(batch, rowInBatch));
-    assertEquals("row " + rowId, i1, getInt(batch, rowInBatch));
-    assertEquals("row " + rowId, l1, getLong(batch, rowInBatch));
-    assertEquals("row " + rowId, f1, getFloat(batch, rowInBatch), 0.0001);
-    assertEquals("row " + rowId, d1, getDouble(batch, rowInBatch), 0.0001);
+    String msg = "row " + rowId;
+    assertEquals(b1, getBoolean(batch, rowInBatch), msg);
+    assertEquals(b2, getByte(batch, rowInBatch), msg);
+    assertEquals(s1, getShort(batch, rowInBatch), msg);
+    assertEquals(i1, getInt(batch, rowInBatch), msg);
+    assertEquals(l1, getLong(batch, rowInBatch), msg);
+    assertEquals(f1, getFloat(batch, rowInBatch), 0.0001, msg);
+    assertEquals(d1, getDouble(batch, rowInBatch), 0.0001, msg);
     if (b3 != null) {
       BytesColumnVector bytes = (BytesColumnVector) batch.cols[7];
-      assertEquals("row " + rowId, b3.getLength(), bytes.length[rowInBatch]);
+      assertEquals(b3.getLength(), bytes.length[rowInBatch], msg);
       for(int i=0; i < b3.getLength(); ++i) {
-        assertEquals("row " + rowId + " byte " + i, b3.getBytes()[i],
-            bytes.vector[rowInBatch][bytes.start[rowInBatch] + i]);
+        assertEquals(b3.getBytes()[i],
+            bytes.vector[rowInBatch][bytes.start[rowInBatch] + i],
+            "row " + rowId + " byte " + i);
       }
     } else {
-      assertEquals("row " + rowId, true, batch.cols[7].isNull[rowInBatch]);
-      assertEquals("row " + rowId, false, batch.cols[7].noNulls);
+      assertEquals(true, batch.cols[7].isNull[rowInBatch], msg);
+      assertEquals(false, batch.cols[7].noNulls, msg);
     }
     if (s2 != null) {
-      assertEquals("row " + rowId, s2, getText(batch, rowInBatch).toString());
+      assertEquals(s2, getText(batch, rowInBatch).toString(), "row " + rowId);
     } else {
-      assertEquals("row " + rowId, true, batch.cols[8].isNull[rowInBatch]);
-      assertEquals("row " + rowId, false, batch.cols[8].noNulls);
+      assertEquals(true, batch.cols[8].isNull[rowInBatch], msg);
+      assertEquals(false, batch.cols[8].noNulls, msg);
     }
     checkMiddleStruct((StructColumnVector) batch.cols[9], rowId, rowInBatch,
         m1);
@@ -1158,8 +1156,9 @@ public class TestVectorOrcFile {
     assertEquals(false, diff);
   }
 
-  @Test
-  public void test1() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void test1(Version fileFormat) throws Exception {
     TypeDescription schema = createBigRowSchema();
     Writer writer = OrcFile.createWriter(testFilePath,
         OrcFile.writerOptions(conf)
@@ -1374,8 +1373,9 @@ public class TestVectorOrcFile {
     }
   }
 
-  @Test
-  public void testColumnProjection() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testColumnProjection(Version fileFormat) throws Exception {
     TypeDescription schema = createInnerSchema();
     Writer writer = OrcFile.createWriter(testFilePath,
                                          OrcFile.writerOptions(conf)
@@ -1471,8 +1471,9 @@ public class TestVectorOrcFile {
     rows2.close();
   }
 
-  @Test
-  public void testEmptyFile() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testEmptyFile(Version fileFormat) throws Exception {
     TypeDescription schema = createBigRowSchema();
     Writer writer = OrcFile.createWriter(testFilePath,
                                          OrcFile.writerOptions(conf)
@@ -1494,8 +1495,9 @@ public class TestVectorOrcFile {
     assertEquals(false, reader.getStripes().iterator().hasNext());
   }
 
-  @Test
-  public void metaData() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void metaData(Version fileFormat) throws Exception {
     TypeDescription schema = createBigRowSchema();
     Writer writer = OrcFile.createWriter(testFilePath,
         OrcFile.writerOptions(conf)
@@ -1552,7 +1554,7 @@ public class TestVectorOrcFile {
   /**
    * Generate an ORC file with a range of dates and times.
    */
-  public void createOrcDateFile(Path file, int minYear, int maxYear
+  public void createOrcDateFile(Path file, int minYear, int maxYear, Version fileFormat
                                 ) throws IOException {
     TypeDescription schema = TypeDescription.createStruct()
         .addField("time", TypeDescription.createTimestamp())
@@ -1607,27 +1609,30 @@ public class TestVectorOrcFile {
       for(int row = 0; row < 1000; ++row) {
         Timestamp expected = Timestamp.valueOf(
             String.format("%04d-05-05 12:34:56.%04d", year, 2*row));
-        assertEquals("ms row " + row + " " + expected, expected.getTime(),
-            times.time[row]);
-        assertEquals("nanos row " + row + " " + expected, expected.getNanos(),
-            times.nanos[row]);
-        assertEquals("year " + year + " row " + row,
+        assertEquals(expected.getTime(), times.time[row],
+            "ms row " + row + " " + expected);
+        assertEquals(expected.getNanos(), times.nanos[row],
+            "nanos row " + row + " " + expected);
+        assertEquals(
             Integer.toString(year) + "-12-25",
-            new DateWritable((int) dates.vector[row]).toString());
+            new DateWritable((int) dates.vector[row]).toString(),
+            "year " + year + " row " + row);
       }
     }
     rows.nextBatch(batch);
     assertEquals(1, batch.size);
   }
 
-  @Test
-  public void testDate1900() throws Exception {
-    createOrcDateFile(testFilePath, 1900, 1970);
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testDate1900(Version fileFormat) throws Exception {
+    createOrcDateFile(testFilePath, 1900, 1970, fileFormat);
   }
 
-  @Test
-  public void testDate2038() throws Exception {
-    createOrcDateFile(testFilePath, 2038, 2250);
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testDate2038(Version fileFormat) throws Exception {
+    createOrcDateFile(testFilePath, 2038, 2250, fileFormat);
   }
 
   private static void setUnion(VectorizedRowBatch batch, int rowId,
@@ -1681,8 +1686,9 @@ public class TestVectorOrcFile {
   /**
    * Test writing with the new decimal and reading with the new and old.
    */
-  @Test
-  public void testDecimal64Writing() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testDecimal64Writing(Version fileFormat) throws Exception {
     TypeDescription schema = TypeDescription.fromString("struct<d:decimal(18,3)>");
     VectorizedRowBatch batch = schema.createRowBatchV2();
     Writer writer = OrcFile.createWriter(testFilePath,
@@ -1718,9 +1724,9 @@ public class TestVectorOrcFile {
     assertEquals(19, batch.size);
     assertEquals(18, cv.precision);
     assertEquals(3, cv.scale);
-    assertEquals("row 0", 1, cv.vector[0]);
+    assertEquals(1, cv.vector[0], "row 0");
     for(int r=1; r < 18; ++r) {
-      assertEquals("row " + r, 10 * cv.vector[r-1], cv.vector[r]);
+      assertEquals(10 * cv.vector[r-1], cv.vector[r], "row " + r);
     }
     assertEquals(-2000, cv.vector[18]);
     assertFalse(rows.nextBatch(batch));
@@ -1758,8 +1764,9 @@ public class TestVectorOrcFile {
   /**
    * Test writing with the old decimal and reading with the new and old.
    */
-  @Test
-  public void testDecimal64Reading() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testDecimal64Reading(Version fileFormat) throws Exception {
     TypeDescription schema = TypeDescription.fromString("struct<d:decimal(18,4)>");
     VectorizedRowBatch batch = schema.createRowBatch();
     Writer writer = OrcFile.createWriter(testFilePath,
@@ -1798,9 +1805,9 @@ public class TestVectorOrcFile {
     assertEquals(19, batch.size);
     assertEquals(18, newCv.precision);
     assertEquals(4, newCv.scale);
-    assertEquals("row 0", 1, newCv.vector[0]);
+    assertEquals(1, newCv.vector[0], "row 0");
     for(int r=1; r < 18; ++r) {
-      assertEquals("row " + r, 10 * newCv.vector[r-1], newCv.vector[r]);
+      assertEquals(10 * newCv.vector[r-1], newCv.vector[r], "row " + r);
     }
     assertEquals(-20000, newCv.vector[18]);
     assertFalse(rows.nextBatch(batch));
@@ -1840,8 +1847,9 @@ public class TestVectorOrcFile {
      * object inspector manually. (The Hive reflection-based doesn't handle
      * them properly.)
      */
-  @Test
-  public void testUnionAndTimestamp() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testUnionAndTimestamp(Version fileFormat) throws Exception {
     final TimeZone original = TimeZone.getDefault();
     TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
     TypeDescription schema = TypeDescription.fromString(
@@ -2079,14 +2087,15 @@ public class TestVectorOrcFile {
 
     for(int i=0; i < 5; ++i) {
       rows.nextBatch(batch);
-      assertEquals("batch " + i, 1000, batch.size);
-      assertEquals("batch " + i, false, union.isRepeating);
-      assertEquals("batch " + i, true, union.noNulls);
+      String msg = "batch " + i;
+      assertEquals(1000, batch.size, msg);
+      assertEquals(false, union.isRepeating, msg);
+      assertEquals(true, union.noNulls, msg);
       for(int r=0; r < batch.size; ++r) {
-        assertEquals("bad tag at " + i + "." +r, 0, union.tags[r]);
+        assertEquals(0, union.tags[r], "bad tag at " + i + "." + r);
       }
-      assertEquals("batch " + i, true, longs.isRepeating);
-      assertEquals("batch " + i, 1732050807, longs.vector[0]);
+      assertEquals(true, longs.isRepeating, msg);
+      assertEquals(1732050807, longs.vector[0], msg);
     }
 
     rows.nextBatch(batch);
@@ -2117,8 +2126,9 @@ public class TestVectorOrcFile {
    * Read and write a randomly generated snappy file.
    * @throws Exception
    */
-  @Test
-  public void testSnappy() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testSnappy(Version fileFormat) throws Exception {
     TypeDescription schema = createInnerSchema();
     Writer writer = OrcFile.createWriter(testFilePath,
                                          OrcFile.writerOptions(conf)
@@ -2156,8 +2166,9 @@ public class TestVectorOrcFile {
    * Read and write a randomly generated lzo file.
    * @throws Exception
    */
-  @Test
-  public void testLzo() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testLzo(Version fileFormat) throws Exception {
     TypeDescription schema =
         TypeDescription.fromString("struct<x:bigint,y:int,z:bigint>");
     Writer writer = OrcFile.createWriter(testFilePath,
@@ -2206,8 +2217,9 @@ public class TestVectorOrcFile {
    * Read and write a randomly generated lz4 file.
    * @throws Exception
    */
-  @Test
-  public void testLz4() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testLz4(Version fileFormat) throws Exception {
     TypeDescription schema =
         TypeDescription.fromString("struct<x:bigint,y:int,z:bigint>");
     Writer writer = OrcFile.createWriter(testFilePath,
@@ -2255,8 +2267,9 @@ public class TestVectorOrcFile {
   /**
    * Read and write a randomly generated zstd file.
    */
-  @Test
-  public void testZstd() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testZstd(Version fileFormat) throws Exception {
     TypeDescription schema =
         TypeDescription.fromString("struct<x:bigint,y:int,z:bigint>");
     try (Writer writer = OrcFile.createWriter(testFilePath,
@@ -2304,8 +2317,9 @@ public class TestVectorOrcFile {
    * Read and write a file; verify codec usage.
    * @throws Exception
    */
-  @Test
-  public void testCodecPool() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testCodecPool(Version fileFormat) throws Exception {
     OrcCodecPool.clear();
     TypeDescription schema = createInnerSchema();
     VectorizedRowBatch batch = schema.createRowBatch();
@@ -2394,8 +2408,9 @@ public class TestVectorOrcFile {
    * Read and write a randomly generated snappy file.
    * @throws Exception
    */
-  @Test
-  public void testWithoutIndex() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testWithoutIndex(Version fileFormat) throws Exception {
     TypeDescription schema = createInnerSchema();
     Writer writer = OrcFile.createWriter(testFilePath,
                                          OrcFile.writerOptions(conf)
@@ -2432,7 +2447,7 @@ public class TestVectorOrcFile {
     BytesColumnVector strs = (BytesColumnVector) batch.cols[1];
     for(int i=0; i < 50; ++i) {
       rows.nextBatch(batch);
-      assertEquals("batch " + i, 1000, batch.size);
+      assertEquals(1000, batch.size, "batch " + i);
       for(int j=0; j < 200; ++j) {
         int intVal = rand.nextInt();
         String strVal = Integer.toBinaryString(rand.nextInt());
@@ -2447,8 +2462,9 @@ public class TestVectorOrcFile {
     rows.close();
   }
 
-  @Test
-  public void testSeek() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testSeek(Version fileFormat) throws Exception {
     TypeDescription schema = createBigRowSchema();
     Writer writer = OrcFile.createWriter(testFilePath,
                                          OrcFile.writerOptions(conf)
@@ -2529,7 +2545,7 @@ public class TestVectorOrcFile {
       if (nextRowInBatch < 0) {
         long base = Math.max(i - 1023, 0);
         rows.seekToRow(base);
-        assertEquals("row " + i, true, rows.nextBatch(batch));
+        assertEquals(true, rows.nextBatch(batch), "row " + i);
         nextRowInBatch = batch.size - 1;
       }
       checkRandomRow(batch, intValues, doubleValues,
@@ -2601,8 +2617,9 @@ public class TestVectorOrcFile {
         new MiddleStruct(inner, inner2), list(), map(inner, inner2));
   }
 
-  @Test
-  public void testMemoryManagement() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testMemoryManagement(Version fileFormat) throws Exception {
     OrcConf.ROWS_BETWEEN_CHECKS.setLong(conf, 100);
     final long POOL_SIZE = 50_000;
     TypeDescription schema = createInnerSchema();
@@ -2640,16 +2657,17 @@ public class TestVectorOrcFile {
     int i = 0;
     for(StripeInformation stripe: reader.getStripes()) {
       i += 1;
-      assertTrue("stripe " + i + " is too long at " + stripe.getDataLength(),
-          stripe.getDataLength() < POOL_SIZE);
+      assertTrue(stripe.getDataLength() < POOL_SIZE,
+          "stripe " + i + " is too long at " + stripe.getDataLength());
     }
     // 0.11 always uses the dictionary, so ends up with a lot more stripes
     assertEquals(fileFormat == OrcFile.Version.V_0_11 ? 25 : 3, i);
     assertEquals(2500, reader.getNumberOfRows());
   }
 
-  @Test
-  public void testPredicatePushdown() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testPredicatePushdown(Version fileFormat) throws Exception {
     TypeDescription schema = createInnerSchema();
     Writer writer = OrcFile.createWriter(testFilePath,
         OrcFile.writerOptions(conf)
@@ -2746,8 +2764,9 @@ public class TestVectorOrcFile {
     assertEquals(3500, rows.getRowNumber());
   }
 
-  @Test
-  public void testQuotedPredicatePushdown() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testQuotedPredicatePushdown(Version fileFormat) throws Exception {
     TypeDescription schema = createQuotedSchema();
     Writer writer = OrcFile.createWriter(testFilePath,
         OrcFile.writerOptions(conf)
@@ -2844,8 +2863,9 @@ public class TestVectorOrcFile {
    * writer with different combinations of repeating and null values.
    * @throws Exception
    */
-  @Test
-  public void testRepeating() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testRepeating(Version fileFormat) throws Exception {
     // create a row type with each type that has a unique writer
     // really just folds short, int, and long together
     TypeDescription schema = TypeDescription.createStruct()
@@ -2999,10 +3019,10 @@ public class TestVectorOrcFile {
     assertEquals(4096, stats[0].getNumberOfValues());
     assertEquals(false, stats[0].hasNull());
     for(TypeDescription colType: schema.getChildren()) {
-      assertEquals("count on " + colType.getId(),
-          2048, stats[colType.getId()].getNumberOfValues());
-      assertEquals("hasNull on " + colType.getId(),
-          true, stats[colType.getId()].hasNull());
+      assertEquals(2048, stats[colType.getId()].getNumberOfValues(),
+          "count on " + colType.getId());
+      assertEquals(true, stats[colType.getId()].hasNull(),
+          "hasNull on " + colType.getId());
     }
     assertEquals(8944, ((BinaryColumnStatistics) stats[1]).getSum());
     assertEquals(1536, ((BooleanColumnStatistics) stats[2]).getTrueCount());
@@ -3054,93 +3074,86 @@ public class TestVectorOrcFile {
 
     // read the 1024 nulls
     for(int f=0; f < batch.cols.length; ++f) {
-      assertEquals("field " + f,
-          true, batch.cols[f].isRepeating);
-      assertEquals("field " + f,
-          false, batch.cols[f].noNulls);
-      assertEquals("field " + f,
-          true, batch.cols[f].isNull[0]);
+      assertEquals(true, batch.cols[f].isRepeating, "field " + f);
+      assertEquals(false, batch.cols[f].noNulls, "field " + f);
+      assertEquals(true, batch.cols[f].isNull[0], "field " + f);
     }
 
     // read the 1024 repeat values
     assertEquals(true, rows.nextBatch(batch));
     assertEquals(1024, batch.size);
     for(int r=0; r < 1024; ++r) {
-      assertEquals("row " + r, "Horton", bins.toString(r));
-      assertEquals("row " + r, 1, bools.vector[r]);
-      assertEquals("row " + r, -126, bytes.vector[r]);
-      assertEquals("row " + r, 1311768467463790320L, longs.vector[r]);
-      assertEquals("row " + r, 1.125, floats.vector[r], 0.00001);
-      assertEquals("row " + r, 9.765625E-4, doubles.vector[r], 0.000001);
-      assertEquals("row " + r, "2011-07-01",
-          new DateWritable((int) dates.vector[r]).toString());
-      assertEquals("row " + r, "2015-10-23 10:11:59.999999999",
-          times.asScratchTimestamp(r).toString());
-      assertEquals("row " + r, "1.234567", decs.vector[r].toString());
-      assertEquals("row " + r, "Echelon", strs.toString(r));
-      assertEquals("row " + r, "Juggernaut", chars.toString(r));
-      assertEquals("row " + r, "Dreadnaugh", vcs.toString(r));
-      assertEquals("row " + r, 123, structInts.vector[r]);
-      assertEquals("row " + r, 1, unions.tags[r]);
-      assertEquals("row " + r, 1234, unionInts.vector[r]);
-      assertEquals("row " + r, 3, lists.lengths[r]);
-      assertEquals("row " + r, true, listInts.isRepeating);
-      assertEquals("row " + r, 31415, listInts.vector[0]);
-      assertEquals("row " + r, 3, maps.lengths[r]);
-      assertEquals("row " + r, "ORC", mapKeys.toString((int) maps.offsets[r]));
-      assertEquals("row " + r, "Hive", mapKeys.toString((int) maps.offsets[r] + 1));
-      assertEquals("row " + r, "LLAP", mapKeys.toString((int) maps.offsets[r] + 2));
-      assertEquals("row " + r, "fast", mapValues.toString((int) maps.offsets[r]));
-      assertEquals("row " + r, "fast", mapValues.toString((int) maps.offsets[r] + 1));
-      assertEquals("row " + r, "fast", mapValues.toString((int) maps.offsets[r] + 2));
+      String msg = "row " + r;
+      assertEquals("Horton", bins.toString(r), msg);
+      assertEquals(1, bools.vector[r], msg);
+      assertEquals(-126, bytes.vector[r], msg);
+      assertEquals(1311768467463790320L, longs.vector[r], msg);
+      assertEquals(1.125, floats.vector[r], 0.00001, msg);
+      assertEquals(9.765625E-4, doubles.vector[r], 0.000001, msg);
+      assertEquals("2011-07-01", new DateWritable((int) dates.vector[r]).toString(), msg);
+      assertEquals("2015-10-23 10:11:59.999999999", times.asScratchTimestamp(r).toString(), msg);
+      assertEquals("1.234567", decs.vector[r].toString(), msg);
+      assertEquals("Echelon", strs.toString(r), msg);
+      assertEquals("Juggernaut", chars.toString(r), msg);
+      assertEquals("Dreadnaugh", vcs.toString(r), msg);
+      assertEquals(123, structInts.vector[r], msg);
+      assertEquals(1, unions.tags[r], msg);
+      assertEquals(1234, unionInts.vector[r], msg);
+      assertEquals(3, lists.lengths[r], msg);
+      assertEquals(true, listInts.isRepeating, msg);
+      assertEquals(31415, listInts.vector[0], msg);
+      assertEquals(3, maps.lengths[r], msg);
+      assertEquals("ORC", mapKeys.toString((int) maps.offsets[r]), msg);
+      assertEquals("Hive", mapKeys.toString((int) maps.offsets[r] + 1), msg);
+      assertEquals("LLAP", mapKeys.toString((int) maps.offsets[r] + 2), msg);
+      assertEquals("fast", mapValues.toString((int) maps.offsets[r]), msg);
+      assertEquals("fast", mapValues.toString((int) maps.offsets[r] + 1), msg);
+      assertEquals("fast", mapValues.toString((int) maps.offsets[r] + 2), msg);
     }
 
     // read the second set of 1024 nulls
     assertEquals(true, rows.nextBatch(batch));
     assertEquals(1024, batch.size);
     for(int f=0; f < batch.cols.length; ++f) {
-      assertEquals("field " + f,
-          true, batch.cols[f].isRepeating);
-      assertEquals("field " + f,
-          false, batch.cols[f].noNulls);
-      assertEquals("field " + f,
-          true, batch.cols[f].isNull[0]);
+      assertEquals(true, batch.cols[f].isRepeating, "field " + f);
+      assertEquals(false, batch.cols[f].noNulls, "field " + f);
+      assertEquals(true, batch.cols[f].isNull[0], "field " + f);
     }
 
     assertEquals(true, rows.nextBatch(batch));
     assertEquals(1024, batch.size);
     for(int r=0; r < 1024; ++r) {
       String hex = Integer.toHexString(r);
-
-      assertEquals("row " + r, hex, bins.toString(r));
-      assertEquals("row " + r, r % 2 == 1 ? 1 : 0, bools.vector[r]);
-      assertEquals("row " + r, (byte) (r % 255), bytes.vector[r]);
-      assertEquals("row " + r, 31415L * r, longs.vector[r]);
-      assertEquals("row " + r, 1.125F * r, floats.vector[r], 0.0001);
-      assertEquals("row " + r, 0.0009765625 * r, doubles.vector[r], 0.000001);
-      assertEquals("row " + r, new DateWritable(new Date(111, 6, 1 + r)),
-          new DateWritable((int) dates.vector[r]));
-      assertEquals("row " + r,
+      String msg = "row " + r;
+      assertEquals(hex, bins.toString(r), msg);
+      assertEquals(r % 2 == 1 ? 1 : 0, bools.vector[r], msg);
+      assertEquals((byte) (r % 255), bytes.vector[r], msg);
+      assertEquals(31415L * r, longs.vector[r], msg);
+      assertEquals(1.125F * r, floats.vector[r], 0.0001, msg);
+      assertEquals(0.0009765625 * r, doubles.vector[r], 0.000001, msg);
+      assertEquals(new DateWritable(new Date(111, 6, 1 + r)),
+          new DateWritable((int) dates.vector[r]), msg);
+      assertEquals(
           new Timestamp(115, 9, 25, 10, 11, 59 + r, 999999999),
-          times.asScratchTimestamp(r));
-      assertEquals("row " + r, "1.234567", decs.vector[r].toString());
-      assertEquals("row " + r, Integer.toString(r), strs.toString(r));
-      assertEquals("row " + r, Integer.toHexString(r), chars.toString(r));
-      assertEquals("row " + r, Integer.toHexString(r * 128), vcs.toString(r));
-      assertEquals("row " + r, r + 13, structInts.vector[r]);
-      assertEquals("row " + r, 1, unions.tags[r]);
-      assertEquals("row " + r, r + 42, unionInts.vector[r]);
-      assertEquals("row " + r, 3, lists.lengths[r]);
-      assertEquals("row " + r, 31415, listInts.vector[(int) lists.offsets[r]]);
-      assertEquals("row " + r, 31416, listInts.vector[(int) lists.offsets[r] + 1]);
-      assertEquals("row " + r, 31417, listInts.vector[(int) lists.offsets[r] + 2]);
-      assertEquals("row " + r, 3, maps.lengths[3]);
-      assertEquals("row " + r, Integer.toHexString(3 * r), mapKeys.toString((int) maps.offsets[r]));
-      assertEquals("row " + r, Integer.toString(3 * r), mapValues.toString((int) maps.offsets[r]));
-      assertEquals("row " + r, Integer.toHexString(3 * r + 1), mapKeys.toString((int) maps.offsets[r] + 1));
-      assertEquals("row " + r, Integer.toString(3 * r + 1), mapValues.toString((int) maps.offsets[r] + 1));
-      assertEquals("row " + r, Integer.toHexString(3 * r + 2), mapKeys.toString((int) maps.offsets[r] + 2));
-      assertEquals("row " + r, Integer.toString(3 * r + 2), mapValues.toString((int) maps.offsets[r] + 2));
+          times.asScratchTimestamp(r), msg);
+      assertEquals("1.234567", decs.vector[r].toString(), msg);
+      assertEquals(Integer.toString(r), strs.toString(r), msg);
+      assertEquals(Integer.toHexString(r), chars.toString(r), msg);
+      assertEquals(Integer.toHexString(r * 128), vcs.toString(r), msg);
+      assertEquals(r + 13, structInts.vector[r], msg);
+      assertEquals(1, unions.tags[r], msg);
+      assertEquals(r + 42, unionInts.vector[r], msg);
+      assertEquals(3, lists.lengths[r], msg);
+      assertEquals(31415, listInts.vector[(int) lists.offsets[r]], msg);
+      assertEquals(31416, listInts.vector[(int) lists.offsets[r] + 1], msg);
+      assertEquals(31417, listInts.vector[(int) lists.offsets[r] + 2], msg);
+      assertEquals(3, maps.lengths[3], msg);
+      assertEquals(Integer.toHexString(3 * r), mapKeys.toString((int) maps.offsets[r]), msg);
+      assertEquals(Integer.toString(3 * r), mapValues.toString((int) maps.offsets[r]), msg);
+      assertEquals(Integer.toHexString(3 * r + 1), mapKeys.toString((int) maps.offsets[r] + 1), msg);
+      assertEquals(Integer.toString(3 * r + 1), mapValues.toString((int) maps.offsets[r] + 1), msg);
+      assertEquals(Integer.toHexString(3 * r + 2), mapKeys.toString((int) maps.offsets[r] + 2), msg);
+      assertEquals(Integer.toString(3 * r + 2), mapValues.toString((int) maps.offsets[r] + 2), msg);
     }
 
     // should have no more rows
@@ -3163,8 +3176,9 @@ public class TestVectorOrcFile {
    * Test the char and varchar padding and truncation.
    * @throws Exception
    */
-  @Test
-  public void testStringPadding() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testStringPadding(Version fileFormat) throws Exception {
     TypeDescription schema = TypeDescription.createStruct()
         .addField("char", TypeDescription.createChar().withMaxLength(10))
         .addField("varchar", TypeDescription.createVarchar().withMaxLength(10));
@@ -3212,9 +3226,10 @@ public class TestVectorOrcFile {
    * to a column that isn't using dictionary encoding.
    * @throws Exception
    */
-  @Test
-  public void testNonDictionaryRepeatingString() throws Exception {
-    Assume.assumeTrue(fileFormat != OrcFile.Version.V_0_11);
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testNonDictionaryRepeatingString(Version fileFormat) throws Exception {
+    assumeTrue(fileFormat != OrcFile.Version.V_0_11);
     TypeDescription schema = TypeDescription.createStruct()
         .addField("str", TypeDescription.createString());
     Writer writer = OrcFile.createWriter(testFilePath,
@@ -3253,8 +3268,9 @@ public class TestVectorOrcFile {
     assertEquals(false, rows.nextBatch(batch));
   }
 
-  @Test
-  public void testStructs() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testStructs(Version fileFormat) throws Exception {
     TypeDescription schema = TypeDescription.createStruct()
         .addField("struct", TypeDescription.createStruct()
             .addField("inner", TypeDescription.createLong()));
@@ -3282,10 +3298,10 @@ public class TestVectorOrcFile {
     LongColumnVector vec = (LongColumnVector) inner.fields[0];
     for(int r=0; r < 1024; ++r) {
       if (r < 200 || (r >= 400 && r < 600) || r >= 800) {
-        assertEquals("row " + r, true, inner.isNull[r]);
+        assertEquals(true, inner.isNull[r], "row " + r);
       } else {
-        assertEquals("row " + r, false, inner.isNull[r]);
-        assertEquals("row " + r, r, vec.vector[r]);
+        assertEquals(false, inner.isNull[r], "row " + r);
+        assertEquals(r, vec.vector[r], "row " + r);
       }
     }
     rows.nextBatch(batch);
@@ -3296,8 +3312,9 @@ public class TestVectorOrcFile {
    * Test Unions.
    * @throws Exception
    */
-  @Test
-  public void testUnions() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testUnions(Version fileFormat) throws Exception {
     TypeDescription schema = TypeDescription.createStruct()
         .addField("outer", TypeDescription.createUnion()
             .addUnionChild(TypeDescription.createInt())
@@ -3339,28 +3356,29 @@ public class TestVectorOrcFile {
     assertEquals(true, rows.nextBatch(batch));
     assertEquals(1024, batch.size);
     for(int r=0; r < 1024; ++r) {
+      String msg = "row " + r;
       if (r < 200) {
-        assertEquals("row " + r, true, union.isNull[r]);
+        assertEquals(true, union.isNull[r], msg);
       } else if (r < 300) {
-        assertEquals("row " + r, false, union.isNull[r]);
-        assertEquals("row " + r, 0, union.tags[r]);
-        assertEquals("row " + r, r, ints.vector[r]);
+        assertEquals(false, union.isNull[r], msg);
+        assertEquals(0, union.tags[r], msg);
+        assertEquals(r, ints.vector[r], msg);
       } else if (r < 400) {
-        assertEquals("row " + r, false, union.isNull[r]);
-        assertEquals("row " + r, 1, union.tags[r]);
-        assertEquals("row " + r, -r, longs.vector[r]);
+        assertEquals(false, union.isNull[r], msg);
+        assertEquals(1, union.tags[r], msg);
+        assertEquals(-r, longs.vector[r], msg);
       } else if (r < 600) {
-        assertEquals("row " + r, true, union.isNull[r]);
+        assertEquals(true, union.isNull[r], msg);
       } else if (r < 800) {
-        assertEquals("row " + r, false, union.isNull[r]);
-        assertEquals("row " + r, 1, union.tags[r]);
-        assertEquals("row " + r, -r, longs.vector[r]);
+        assertEquals(false, union.isNull[r], msg);
+        assertEquals(1, union.tags[r], msg);
+        assertEquals(-r, longs.vector[r], msg);
       } else if (r < 1000) {
-        assertEquals("row " + r, true, union.isNull[r]);
+        assertEquals(true, union.isNull[r], msg);
       } else {
-        assertEquals("row " + r, false, union.isNull[r]);
-        assertEquals("row " + r, 1, union.tags[r]);
-        assertEquals("row " + r, -r, longs.vector[r]);
+        assertEquals(false, union.isNull[r], msg);
+        assertEquals(1, union.tags[r], msg);
+        assertEquals(-r, longs.vector[r], msg);
       }
     }
     assertEquals(false, rows.nextBatch(batch));
@@ -3372,8 +3390,9 @@ public class TestVectorOrcFile {
    * oper lap.
    * @throws Exception
    */
-  @Test
-  public void testLists() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testLists(Version fileFormat) throws Exception {
     TypeDescription schema = TypeDescription.createStruct()
         .addField("list",
             TypeDescription.createList(TypeDescription.createLong()));
@@ -3417,23 +3436,22 @@ public class TestVectorOrcFile {
     for(int r=0; r < 1024; ++r) {
       StringBuilder actual = new StringBuilder();
       list.stringifyValue(actual, r);
+      String msg = "row " + r;
       if (r < 200) {
-        assertEquals("row " + r, "null", actual.toString());
+        assertEquals("null", actual.toString(), msg);
       } else if (r < 300) {
-        assertEquals("row " + r, "[" + ((r - 200) * 10) + "]",
-            actual.toString());
+        assertEquals("[" + ((r - 200) * 10) + "]", actual.toString(), msg);
       } else if (r < 400) {
-        assertEquals("row " + r, "null", actual.toString());
+        assertEquals("null", actual.toString(), msg);
       } else if (r < 500) {
-        assertEquals("row " + r, "[" + ((r - 300) * 10) + "]",
-            actual.toString());
+        assertEquals("[" + ((r - 300) * 10) + "]", actual.toString(), msg);
       } else if (r < 600) {
-        assertEquals("row " + r, "null", actual.toString());
+        assertEquals("null", actual.toString(), msg);
       } else if (r < 700) {
-        assertEquals("row " + r, "[" + (10 * r) + ", " + (10 * (r + 1)) + "]",
-            actual.toString());
+        assertEquals("[" + (10 * r) + ", " + (10 * (r + 1)) + "]",
+            actual.toString(), msg);
       } else {
-        assertEquals("row " + r, "null", actual.toString());
+        assertEquals("null", actual.toString(), msg);
       }
     }
     assertEquals(false, rows.nextBatch(batch));
@@ -3445,8 +3463,9 @@ public class TestVectorOrcFile {
    * oper lap.
    * @throws Exception
    */
-  @Test
-  public void testMaps() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testMaps(Version fileFormat) throws Exception {
     TypeDescription schema = TypeDescription.createStruct()
         .addField("map",
             TypeDescription.createMap(TypeDescription.createLong(),
@@ -3492,34 +3511,36 @@ public class TestVectorOrcFile {
     for(int r=0; r < 1024; ++r) {
       StringBuilder buffer = new StringBuilder();
       map.stringifyValue(buffer, r);
+      String msg = "row " + r;
       String actual = buffer.toString();
       if (r < 200) {
-        assertEquals("row " + r, "null", actual);
+        assertEquals("null", actual, msg);
       } else if (r < 300) {
-        assertEquals("row " + r, "[{\"key\": " + (r - 200) +
+        assertEquals("[{\"key\": " + (r - 200) +
                 ", \"value\": " + ((r - 200) * 10) + "}]",
-            actual);
+            actual, msg);
       } else if (r < 400) {
-        assertEquals("row " + r, "null", actual);
+        assertEquals("null", actual, msg);
       } else if (r < 500) {
-        assertEquals("row " + r, "[{\"key\": " + (r - 300) +
-                ", \"value\": " + ((r - 300) * 10) + "}]", actual);
+        assertEquals("[{\"key\": " + (r - 300) +
+                ", \"value\": " + ((r - 300) * 10) + "}]", actual, msg);
       } else if (r < 600) {
-        assertEquals("row " + r, "null", actual);
+        assertEquals("null", actual, msg);
       } else if (r < 700) {
-        assertEquals("row " + r, "[{\"key\": " + r + ", \"value\": " + (r * 10)
+        assertEquals("[{\"key\": " + r + ", \"value\": " + (r * 10)
                 + "}, {\"key\": " + (r + 1) + ", \"value\": " + (10 * (r + 1))
-                + "}]", actual);
+                + "}]", actual, msg);
       } else {
-        assertEquals("row " + r, "null", actual);
+        assertEquals("null", actual, msg);
       }
     }
     rows.nextBatch(batch);
     assertEquals(0, batch.size);
   }
 
-  @Test
-  public void testExpansion() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testExpansion(Version fileFormat) throws Exception {
     TypeDescription schema =
         TypeDescription.fromString(
             "struct<list1:array<string>," +
@@ -3555,9 +3576,10 @@ public class TestVectorOrcFile {
     rows.close();
   }
 
-  @Test
-  public void testWriterVersion() throws Exception {
-    Assume.assumeTrue(fileFormat == OrcFile.Version.V_0_11);
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testWriterVersion(Version fileFormat) throws Exception {
+    assumeTrue(fileFormat == OrcFile.Version.V_0_11);
 
     // test writer implementation serialization
     assertEquals(OrcFile.WriterImplementation.ORC_JAVA,
@@ -3606,19 +3628,23 @@ public class TestVectorOrcFile {
         OrcFile.WriterVersion.TRINO_ORIGINAL));
   }
 
-  @Test(expected=IllegalArgumentException.class)
-  public void testBadPrestoVersion() {
-    Assume.assumeTrue(fileFormat == OrcFile.Version.V_0_11);
-    OrcFile.WriterVersion.from(OrcFile.WriterImplementation.PRESTO, 0);
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testBadPrestoVersion(Version fileFormat) {
+    assumeTrue(fileFormat == OrcFile.Version.V_0_11);
+    assertThrows(IllegalArgumentException.class, () -> {
+      OrcFile.WriterVersion.from(OrcFile.WriterImplementation.PRESTO, 0);
+    });
   }
 
   /**
    * Test whether the file versions are translated correctly
    * @throws Exception
    */
-  @Test
-  public void testFileVersion() throws Exception {
-    Assume.assumeTrue(fileFormat == OrcFile.Version.V_0_11);
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testFileVersion(Version fileFormat) throws Exception {
+    assumeTrue(fileFormat == OrcFile.Version.V_0_11);
     assertEquals(OrcFile.Version.V_0_11, ReaderImpl.getFileVersion(null));
     assertEquals(OrcFile.Version.V_0_11, ReaderImpl.getFileVersion(new ArrayList<Integer>()));
     assertEquals(OrcFile.Version.V_0_11,
@@ -3629,9 +3655,10 @@ public class TestVectorOrcFile {
         ReaderImpl.getFileVersion(Arrays.asList(new Integer[]{9999, 0})));
   }
 
-  @Test
-  public void testMergeUnderstood() throws Exception {
-    Assume.assumeTrue(fileFormat == OrcFile.Version.V_0_11);
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testMergeUnderstood(Version fileFormat) throws Exception {
+    assumeTrue(fileFormat == OrcFile.Version.V_0_11);
     Path p = new Path("test.orc");
     Reader futureVersion = Mockito.mock(Reader.class);
     Mockito.when(futureVersion.getFileVersion()).thenReturn(OrcFile.Version.FUTURE);
@@ -3655,8 +3682,9 @@ public class TestVectorOrcFile {
     return Long.toHexString(x).getBytes(StandardCharsets.UTF_8);
   }
 
-  @Test
-  public void testMerge() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testMerge(Version fileFormat) throws Exception {
     Path input1 = new Path(workDir, "TestVectorOrcFile.testMerge1-" +
         fileFormat.getName() + ".orc");
     fs.delete(input1, false);
@@ -3805,7 +3833,8 @@ public class TestVectorOrcFile {
                                     int stripes,
                                     int bufferSize,
                                     String encrypt,
-                                    String mask) throws IOException {
+                                    String mask,
+                                    Version fileFormat) throws IOException {
     fs.delete(path, false);
     TypeDescription schema = TypeDescription.fromString(
         "struct<a:int,b:struct<c:string,d:string>>");
@@ -3856,9 +3885,10 @@ public class TestVectorOrcFile {
     }
   }
 
-  @Test
-  public void testEncryptMerge() throws Exception {
-    Assume.assumeTrue(fileFormat != OrcFile.Version.V_0_11);
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testEncryptMerge(Version fileFormat) throws Exception {
+    assumeTrue(fileFormat != OrcFile.Version.V_0_11);
     Path input1 = new Path(workDir, "TestVectorOrcFile.testEncryptMerge1-" +
                                         fileFormat.getName() + ".orc");
     Path input2 = new Path(workDir, "TestVectorOrcFile.testEncryptMerge2-" +
@@ -3882,12 +3912,12 @@ public class TestVectorOrcFile {
     String mask = "sha256,`don't worry`:b";
 
     // write three files that should merge, each with 3 stripes of 1024 rows.
-    long[] cuts = writeMergeableFile(input1, keystore, 0, 3, 0x400, encryption, mask);
-    writeMergeableFile(input2, keystore, 3 * 1024, 3, 0x800, encryption, mask);
-    writeMergeableFile(input3, keystore, 6 * 1024, 3, 0xc00, encryption, mask);
+    long[] cuts = writeMergeableFile(input1, keystore, 0, 3, 0x400, encryption, mask, fileFormat);
+    writeMergeableFile(input2, keystore, 3 * 1024, 3, 0x800, encryption, mask, fileFormat);
+    writeMergeableFile(input3, keystore, 6 * 1024, 3, 0xc00, encryption, mask, fileFormat);
     // two files that aren't mergeable
-    writeMergeableFile(input4, keystore, 9 * 1024, 3, 0x400, encryption, null);
-    writeMergeableFile(input5, keystore, 12 * 1024, 3, 0x400, null, null);
+    writeMergeableFile(input4, keystore, 9 * 1024, 3, 0x400, encryption, null, fileFormat);
+    writeMergeableFile(input5, keystore, 12 * 1024, 3, 0x400, null, null, fileFormat);
 
     // make sure that we can read up to the intermediate footers
     try (Reader reader = OrcFile.createReader(input1, OrcFile.readerOptions(conf)
@@ -3950,15 +3980,16 @@ public class TestVectorOrcFile {
     List<StripeStatistics> stripeStats = reader.getStripeStatistics();
     for(int s=0; s < stripeStats.size(); ++s) {
       ColumnStatistics[] cs = stripeStats.get(s).getColumnStatistics();
-      assertEquals("stripe " + s, 1024, cs[0].getNumberOfValues());
-      assertEquals("stripe " + s, 0, cs[1].getNumberOfValues());
-      assertEquals("stripe " + s, 1024, cs[2].getNumberOfValues());
-      assertEquals("stripe " + s, 1024, cs[3].getNumberOfValues());
-      assertEquals("stripe " + s, 64, ((StringColumnStatistics) cs[3]).getMinimum().length());
-      assertEquals("stripe " + s, 64, ((StringColumnStatistics) cs[3]).getMaximum().length());
-      assertEquals("stripe " + s, 1024, cs[4].getNumberOfValues());
-      assertEquals("stripe " + s, 64, ((StringColumnStatistics) cs[4]).getMinimum().length());
-      assertEquals("stripe " + s, 64, ((StringColumnStatistics) cs[4]).getMaximum().length());
+      String msg = "stripe " + s;
+      assertEquals(1024, cs[0].getNumberOfValues(), msg);
+      assertEquals(0, cs[1].getNumberOfValues(), msg);
+      assertEquals(1024, cs[2].getNumberOfValues(), msg);
+      assertEquals(1024, cs[3].getNumberOfValues(), msg);
+      assertEquals(64, ((StringColumnStatistics) cs[3]).getMinimum().length(), msg);
+      assertEquals(64, ((StringColumnStatistics) cs[3]).getMaximum().length(), msg);
+      assertEquals(1024, cs[4].getNumberOfValues(), msg);
+      assertEquals(64, ((StringColumnStatistics) cs[4]).getMinimum().length(), msg);
+      assertEquals(64, ((StringColumnStatistics) cs[4]).getMaximum().length(), msg);
     }
     // check the file contents
     RecordReader rows = reader.rows();
@@ -3972,12 +4003,12 @@ public class TestVectorOrcFile {
       assertEquals(1024, batch.size);
       for(int r=0; r < batch.size; ++r) {
         long value = btch * 1024 + r;
-        assertEquals("batch " + btch + " row " + r, true, a.isNull[r]);
-        assertEquals("batch " + btch + " row " + r,
-            computeSha(Long.toHexString(value)), c.toString(r));
-        assertEquals("batch " + btch + " row " + r,
+        String msg = "batch " + btch + " row " + r;
+        assertEquals(true, a.isNull[r], msg);
+        assertEquals(computeSha(Long.toHexString(value)), c.toString(r), msg);
+        assertEquals(
             computeSha(String.format("%010x", value * 1_000_001)),
-            d.toString(r));
+            d.toString(r), msg);
       }
     }
     assertEquals(false, rows.nextBatch(batch));
@@ -4015,21 +4046,22 @@ public class TestVectorOrcFile {
       long low = s * 1024;
       long high = s * 1024 + 1023;
       ColumnStatistics[] cs = stripeStats.get(s).getColumnStatistics();
-      assertEquals("stripe " + s, 1024, cs[0].getNumberOfValues());
-      assertEquals("stripe " + s, 1024, cs[1].getNumberOfValues());
-      assertEquals("stripe " + s, low, ((IntegerColumnStatistics) cs[1]).getMinimum());
-      assertEquals("stripe " + s, high, ((IntegerColumnStatistics) cs[1]).getMaximum());
-      assertEquals("stripe " + s, 1024, cs[2].getNumberOfValues());
-      assertEquals("stripe " + s, 1024, cs[3].getNumberOfValues());
-      assertEquals("stripe " + s, Long.toHexString(low),
-          ((StringColumnStatistics) cs[3]).getMinimum());
-      assertEquals("stripe " + s, s == 0 ? "ff" : Long.toHexString(high),
-          ((StringColumnStatistics) cs[3]).getMaximum());
-      assertEquals("stripe " + s, 1024, cs[4].getNumberOfValues());
-      assertEquals("stripe " + s, String.format("%010x", 1_000_001 * low),
-          ((StringColumnStatistics) cs[4]).getMinimum());
-      assertEquals("stripe " + s, String.format("%010x", 1_000_001 * high),
-          ((StringColumnStatistics) cs[4]).getMaximum());
+      String msg = "stripe " + s;
+      assertEquals(1024, cs[0].getNumberOfValues(), msg);
+      assertEquals(1024, cs[1].getNumberOfValues(), msg);
+      assertEquals(low, ((IntegerColumnStatistics) cs[1]).getMinimum(), msg);
+      assertEquals(high, ((IntegerColumnStatistics) cs[1]).getMaximum(), msg);
+      assertEquals(1024, cs[2].getNumberOfValues(), msg);
+      assertEquals(1024, cs[3].getNumberOfValues(), msg);
+      assertEquals(Long.toHexString(low),
+          ((StringColumnStatistics) cs[3]).getMinimum(), msg);
+      assertEquals(s == 0 ? "ff" : Long.toHexString(high),
+          ((StringColumnStatistics) cs[3]).getMaximum(), msg);
+      assertEquals(1024, cs[4].getNumberOfValues(), msg);
+      assertEquals(String.format("%010x", 1_000_001 * low),
+          ((StringColumnStatistics) cs[4]).getMinimum(), msg);
+      assertEquals(String.format("%010x", 1_000_001 * high),
+          ((StringColumnStatistics) cs[4]).getMaximum(), msg);
     }
     // check the file contents
     rows = reader.rows();
@@ -4038,11 +4070,10 @@ public class TestVectorOrcFile {
       assertEquals(1024, batch.size);
       for(int r=0; r < batch.size; ++r) {
         long value = btch * 1024 + r;
-        assertEquals("batch " + btch + " row " + r, value, a.vector[r]);
-        assertEquals("batch " + btch + " row " + r, Long.toHexString(value),
-            c.toString(r));
-        assertEquals("batch " + btch + " row " + r,
-            String.format("%010x", value * 1_000_001), d.toString(r));
+        String msg = "batch " + btch + " row " + r;
+        assertEquals(value, a.vector[r], msg);
+        assertEquals(Long.toHexString(value), c.toString(r), msg);
+        assertEquals(String.format("%010x", value * 1_000_001), d.toString(r), msg);
       }
     }
     assertEquals(false, rows.nextBatch(batch));
@@ -4077,10 +4108,11 @@ public class TestVectorOrcFile {
   Path exampleDir = new Path(System.getProperty("example.dir",
       "../../examples/"));
 
-  @Test
-  public void testZeroByteOrcFile() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testZeroByteOrcFile(Version fileFormat) throws Exception {
     // we only have to run this test once, since it is a 0 byte file.
-    Assume.assumeTrue(fileFormat == OrcFile.Version.V_0_11);
+    assumeTrue(fileFormat == OrcFile.Version.V_0_11);
     Path zeroFile = new Path(exampleDir, "zero.orc");
     Reader reader = OrcFile.createReader(zeroFile, OrcFile.readerOptions(conf));
     assertEquals(0, reader.getNumberOfRows());
@@ -4100,15 +4132,16 @@ public class TestVectorOrcFile {
     assertEquals(false, reader.rows().nextBatch(batch));
   }
 
-  @Test
-  public void testFutureOrcFile() throws Exception {
-    Assume.assumeTrue(fileFormat == OrcFile.Version.V_0_11);
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testFutureOrcFile(Version fileFormat) throws Exception {
+    assumeTrue(fileFormat == OrcFile.Version.V_0_11);
     Path zeroFile = new Path(exampleDir, "version1999.orc");
     try {
       OrcFile.createReader(zeroFile, OrcFile.readerOptions(conf));
-      assertTrue("no exception for bad version", false);
+      assertTrue(false, "no exception for bad version");
     } catch (UnknownFormatException uf) {
-      assertEquals("path is correct", "version1999.orc", uf.getPath().getName());
+      assertEquals("version1999.orc", uf.getPath().getName(), "path is correct");
       assertEquals("19.99", uf.getVersionString());
       OrcProto.PostScript ps = uf.getPostscript();
       assertEquals("ORC", ps.getMagic());
@@ -4116,8 +4149,9 @@ public class TestVectorOrcFile {
     }
   }
 
-  @Test
-  public void testEmptyDoubleStream() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testEmptyDoubleStream(Version fileFormat) throws Exception {
     TypeDescription schema =
         TypeDescription.fromString("struct<list1:array<double>," +
             "list2:array<float>>");
@@ -4151,8 +4185,9 @@ public class TestVectorOrcFile {
     rows.close();
   }
 
-  @Test
-  public void testPredicatePushdownForComplex() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testPredicatePushdownForComplex(Version fileFormat) throws Exception {
     TypeDescription schema = createComplexInnerSchema();
     Writer writer = OrcFile.createWriter(testFilePath,
             OrcFile.writerOptions(conf)
@@ -4256,8 +4291,9 @@ public class TestVectorOrcFile {
     assertEquals(3500, rows.getRowNumber());
   }
 
-  @Test
-  public void testPredicatePushdownWithNan() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testPredicatePushdownWithNan(Version fileFormat) throws Exception {
     TypeDescription schema = TypeDescription.createStruct()
             .addField("double1", TypeDescription.createDouble());
 
@@ -4317,8 +4353,9 @@ public class TestVectorOrcFile {
    * Test predicate pushdown on nulls, with different combinations of
    * values and nulls.
    */
-  @Test
-  public void testPredicatePushdownAllNulls() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testPredicatePushdownAllNulls(Version fileFormat) throws Exception {
     TypeDescription schema = createInnerSchema();
     try (Writer writer = OrcFile.createWriter(testFilePath,
         OrcFile.writerOptions(conf).setSchema(schema).rowIndexStride(1024).version(fileFormat))) {
@@ -4387,8 +4424,9 @@ public class TestVectorOrcFile {
    * Write three row groups, one with (null, null), one with (1, "val"), and one with
    * alternating rows.
    */
-  @Test
-  public void testPredicatePushdownMixedNulls() throws Exception {
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testPredicatePushdownMixedNulls(Version fileFormat) throws Exception {
     TypeDescription schema = createInnerSchema();
     try (Writer writer = OrcFile.createWriter(testFilePath,
                            OrcFile.writerOptions(conf)
@@ -4444,9 +4482,10 @@ public class TestVectorOrcFile {
     }
   }
 
-  @Test
-  public void testColumnEncryption() throws Exception {
-    Assume.assumeTrue(fileFormat != OrcFile.Version.V_0_11);
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testColumnEncryption(Version fileFormat) throws Exception {
+    assumeTrue(fileFormat != OrcFile.Version.V_0_11);
     final int ROWS = 1000;
     final int SEED = 2;
     final Random random = new Random(SEED);
@@ -4527,7 +4566,7 @@ public class TestVectorOrcFile {
     assertEquals(false, x.noNulls);
     assertEquals(true, x.isNull[0]);
     for(int r=0; r < ROWS; ++r) {
-      assertEquals("row " + r, r * 5, norm.vector[r]);
+      assertEquals(r * 5, norm.vector[r], "row " + r);
     }
     assertFalse(rows.nextBatch(batch));
     rows.close();
@@ -4573,23 +4612,25 @@ public class TestVectorOrcFile {
     assertEquals(true, xElem.noNulls);
     assertEquals(true, j.noNulls);
     for(int r=0; r < ROWS; ++r) {
-      assertEquals("row " + r, r * 3, i.vector[r]);
-      assertEquals("row " + r, r * 5, norm.vector[r]);
-      assertEquals("row " + r, r * 3, x.offsets[r]);
-      assertEquals("row " + r, 3, x.lengths[r]);
+      String msg = "row " + r;
+      assertEquals(r * 3, i.vector[r], msg);
+      assertEquals(r * 5, norm.vector[r], msg);
+      assertEquals(r * 3, x.offsets[r], msg);
+      assertEquals(3, x.lengths[r], msg);
       for(int child=0; child < x.lengths[r]; ++child) {
-        assertEquals("row " + r + "." + child, String.format("%d.%d", r, child),
-            xElem.toString((int) x.offsets[r] + child));
+        assertEquals(String.format("%d.%d", r, child),
+            xElem.toString((int) x.offsets[r] + child), msg);
       }
-      assertEquals("row " + r, r * 7, j.vector[r]);
+      assertEquals(r * 7, j.vector[r], msg);
     }
     assertFalse(rows.nextBatch(batch));
     rows.close();
   }
 
-  @Test
-  public void testMultiStripeColumnEncryption() throws Exception {
-    Assume.assumeTrue(fileFormat != OrcFile.Version.V_0_11);
+  @ParameterizedTest
+  @MethodSource("data")
+  public void testMultiStripeColumnEncryption(Version fileFormat) throws Exception {
+    assumeTrue(fileFormat != OrcFile.Version.V_0_11);
     final EncryptionAlgorithm algorithm = EncryptionAlgorithm.AES_CTR_128;
     final int BATCHES = 100;
     final int SEED = 3;
@@ -4674,7 +4715,7 @@ public class TestVectorOrcFile {
   private void checkHasData(RecordReader reader, VectorizedRowBatch batch,
                             int BATCHES, boolean... hasData) throws IOException {
     for(int b=0; b < BATCHES; ++b) {
-      assertEquals("batch " + b, true, reader.nextBatch(batch));
+      assertEquals(true, reader.nextBatch(batch), "batch " + b);
       for(int c=0; c < hasData.length; c++) {
         if (hasData[c]) {
           // the expected value
@@ -4707,21 +4748,21 @@ public class TestVectorOrcFile {
             actual = row -> ((BytesColumnVector) batch.cols[5]).toString(row);
             break;
           }
-          assertEquals("batch " + b + " column " + c, true, batch.cols[c].noNulls);
-          assertEquals("batch " + b + " column " + c + " row 0", expected, actual.apply(0));
+          assertEquals(true, batch.cols[c].noNulls, "batch " + b + " column " + c);
+          assertEquals(expected, actual.apply(0), "batch " + b + " column " + c + " row 0");
           // Not all of the readers set isRepeating, so if it isn't set, check the values.
           if (!batch.cols[c].isRepeating) {
             for(int r=1; r < batch.size; ++r) {
-              assertEquals("batch " + b + " column " + c + " row " + r, expected, actual.apply(r));
+              assertEquals(expected, actual.apply(r), "batch " + b + " column " + c + " row " + r);
             }
           }
         } else {
-          assertEquals("batch " + b + " column " + c, true, batch.cols[c].isRepeating);
-          assertEquals("batch " + b + " column " + c, true, batch.cols[c].isNull[0]);
+          assertEquals(true, batch.cols[c].isRepeating, "batch " + b + " column " + c);
+          assertEquals(true, batch.cols[c].isNull[0], "batch " + b + " column " + c);
         }
       }
     }
-    assertEquals("end", false, reader.nextBatch(batch));
+    assertEquals(false, reader.nextBatch(batch), "end");
     reader.close();
   }
 }