You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@avro.apache.org by mg...@apache.org on 2022/01/11 18:39:49 UTC

[avro] branch avro-3120-fix-build-with-jdk16+ created (now 120a3fb)

This is an automated email from the ASF dual-hosted git repository.

mgrigorov pushed a change to branch avro-3120-fix-build-with-jdk16+
in repository https://gitbox.apache.org/repos/asf/avro.git.


      at 120a3fb  AVRO-3120: Support Next Java LTS (Java 17)

This branch includes the following new commits:

     new 120a3fb  AVRO-3120: Support Next Java LTS (Java 17)

The 1 revision listed above as "new" is entirely new to this
repository and will be described in a separate email.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


[avro] 01/01: AVRO-3120: Support Next Java LTS (Java 17)

Posted by mg...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mgrigorov pushed a commit to branch avro-3120-fix-build-with-jdk16+
in repository https://gitbox.apache.org/repos/asf/avro.git

commit 120a3fb869e098f0cca14d35dfba1a12eb18bf5c
Author: Martin Tzvetanov Grigorov <mg...@apache.org>
AuthorDate: Tue Jan 11 20:36:56 2022 +0200

    AVRO-3120: Support Next Java LTS (Java 17)
    
    Disable Spotless' removeUnusedImports
    (https://github.com/diffplug/spotless/issues/834). It will be re-enabled
    once Avro is updated to JDK 11+. Then we can add .mvn/jvm.config (see
    https://github.com/diffplug/spotless/issues/834#issuecomment-817524058)
    
    Replace Easymock with Mockito.
    
    Signed-off-by: Martin Tzvetanov Grigorov <mg...@apache.org>
---
 .github/workflows/test-lang-java.yml               |  1 +
 lang/java/mapred/pom.xml                           |  4 +-
 .../avro/mapreduce/TestAvroKeyInputFormat.java     | 14 +++----
 .../avro/mapreduce/TestAvroKeyOutputFormat.java    | 48 ++++++++++------------
 .../avro/mapreduce/TestAvroKeyRecordReader.java    | 22 +++++-----
 .../avro/mapreduce/TestAvroKeyRecordWriter.java    | 20 ++++-----
 .../mapreduce/TestAvroKeyValueRecordReader.java    | 22 +++++-----
 .../mapreduce/TestAvroKeyValueRecordWriter.java    | 24 ++++-------
 lang/java/pom.xml                                  | 13 +++---
 9 files changed, 72 insertions(+), 96 deletions(-)

diff --git a/.github/workflows/test-lang-java.yml b/.github/workflows/test-lang-java.yml
index b76b9c3..b2d167c 100644
--- a/.github/workflows/test-lang-java.yml
+++ b/.github/workflows/test-lang-java.yml
@@ -38,6 +38,7 @@ jobs:
         java:
         - '8'
         - '11'
+        - '17'
     steps:
       - uses: actions/checkout@v2
 
diff --git a/lang/java/mapred/pom.xml b/lang/java/mapred/pom.xml
index 23f4b88..7a4b8a6 100644
--- a/lang/java/mapred/pom.xml
+++ b/lang/java/mapred/pom.xml
@@ -194,8 +194,8 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.easymock</groupId>
-      <artifactId>easymock</artifactId>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyInputFormat.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyInputFormat.java
index f46606c..6f10f5c 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyInputFormat.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyInputFormat.java
@@ -19,7 +19,7 @@
 package org.apache.avro.mapreduce;
 
 import static org.junit.Assert.*;
-import static org.easymock.EasyMock.*;
+import static org.mockito.Mockito.*;
 
 import java.io.IOException;
 
@@ -45,12 +45,9 @@ public class TestAvroKeyInputFormat {
     AvroJob.setInputKeySchema(job, Schema.create(Schema.Type.STRING));
     Configuration conf = job.getConfiguration();
 
-    FileSplit inputSplit = createMock(FileSplit.class);
-    TaskAttemptContext context = createMock(TaskAttemptContext.class);
-    expect(context.getConfiguration()).andReturn(conf).anyTimes();
-
-    replay(inputSplit);
-    replay(context);
+    FileSplit inputSplit = mock(FileSplit.class);
+    TaskAttemptContext context = mock(TaskAttemptContext.class);
+    when(context.getConfiguration()).thenReturn(conf);
 
     AvroKeyInputFormat inputFormat = new AvroKeyInputFormat();
     @SuppressWarnings("unchecked")
@@ -58,7 +55,6 @@ public class TestAvroKeyInputFormat {
     assertNotNull(inputFormat);
     recordReader.close();
 
-    verify(inputSplit);
-    verify(context);
+    verify(context).getConfiguration();
   }
 }
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyOutputFormat.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyOutputFormat.java
index 46784db..2932a12 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyOutputFormat.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyOutputFormat.java
@@ -18,8 +18,8 @@
 
 package org.apache.avro.mapreduce;
 
-import static org.easymock.EasyMock.*;
 import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
 
 import java.io.IOException;
 import java.io.OutputStream;
@@ -29,16 +29,17 @@ import org.apache.avro.file.CodecFactory;
 import org.apache.avro.file.DataFileConstants;
 import org.apache.avro.generic.GenericData;
 import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.reflect.ReflectData;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.easymock.Capture;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
+import org.mockito.ArgumentCaptor;
 
 public class TestAvroKeyOutputFormat {
   private static final String SYNC_INTERVAL_KEY = org.apache.avro.mapred.AvroOutputFormat.SYNC_INTERVAL_KEY;
@@ -125,41 +126,34 @@ public class TestAvroKeyOutputFormat {
     job.getConfiguration().set("mapred.output.dir", mTempDir.getRoot().getPath());
     Schema writerSchema = Schema.create(Schema.Type.INT);
     AvroJob.setOutputKeySchema(job, writerSchema);
-    TaskAttemptContext context = createMock(TaskAttemptContext.class);
-    expect(context.getConfiguration()).andReturn(job.getConfiguration()).anyTimes();
-    expect(context.getTaskAttemptID()).andReturn(TaskAttemptID.forName("attempt_200707121733_0001_m_000000_0"))
-        .anyTimes();
-    expect(context.getNumReduceTasks()).andReturn(1);
+    TaskAttemptContext context = mock(TaskAttemptContext.class);
+    when(context.getConfiguration()).thenReturn(job.getConfiguration());
+    when(context.getTaskAttemptID()).thenReturn(TaskAttemptID.forName("attempt_200707121733_0001_m_000000_0"));
+    when(context.getNumReduceTasks()).thenReturn(1);
 
     // Create a mock record writer.
     @SuppressWarnings("unchecked")
-    RecordWriter<AvroKey<Integer>, NullWritable> expectedRecordWriter = createMock(RecordWriter.class);
-    AvroKeyOutputFormat.RecordWriterFactory recordWriterFactory = createMock(
-        AvroKeyOutputFormat.RecordWriterFactory.class);
-
-    // Expect the record writer factory to be called with appropriate parameters.
-    Capture<CodecFactory> capturedCodecFactory = Capture.newInstance();
-    expect(recordWriterFactory.create(eq(writerSchema), anyObject(GenericData.class), capture(capturedCodecFactory), // Capture
-                                                                                                                     // for
-                                                                                                                     // comparison
-                                                                                                                     // later.
-        anyObject(OutputStream.class), eq(expectedSyncInterval))).andReturn(expectedRecordWriter);
-
-    replay(context);
-    replay(expectedRecordWriter);
-    replay(recordWriterFactory);
+    RecordWriter<AvroKey<Integer>, NullWritable> expectedRecordWriter = mock(RecordWriter.class);
+    AvroKeyOutputFormat.RecordWriterFactory recordWriterFactory = mock(AvroKeyOutputFormat.RecordWriterFactory.class);
+
+    // Expect the record writer factory to be called with appropriate parameters.
+    ArgumentCaptor<CodecFactory> capturedCodecFactory = ArgumentCaptor.forClass(CodecFactory.class);
+    when(recordWriterFactory.create(eq(writerSchema), any(GenericData.class), capturedCodecFactory.capture(), // Capture
+                                                                                                              // for
+                                                                                                              // comparison
+                                                                                                              // later.
+        any(OutputStream.class), eq(expectedSyncInterval))).thenReturn(expectedRecordWriter);
 
     AvroKeyOutputFormat<Integer> outputFormat = new AvroKeyOutputFormat<>(recordWriterFactory);
     RecordWriter<AvroKey<Integer>, NullWritable> recordWriter = outputFormat.getRecordWriter(context);
     // Make sure the expected codec was used.
-    assertTrue(capturedCodecFactory.hasCaptured());
+    assertNotNull(capturedCodecFactory.getValue());
     assertEquals(expectedCodec.toString(), capturedCodecFactory.getValue().toString());
 
-    verify(context);
-    verify(expectedRecordWriter);
-    verify(recordWriterFactory);
+    verify(context, atLeastOnce()).getConfiguration();
+    verify(recordWriterFactory).create(eq(writerSchema), any(ReflectData.class), any(CodecFactory.class), any(OutputStream.class), anyInt());
 
     assertNotNull(recordWriter);
-    assertTrue(expectedRecordWriter == recordWriter);
+    assertSame(expectedRecordWriter, recordWriter);
   }
 }
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordReader.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordReader.java
index 2940cb3..64ff39a 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordReader.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordReader.java
@@ -18,8 +18,8 @@
 
 package org.apache.avro.mapreduce;
 
-import static org.easymock.EasyMock.*;
 import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
 
 import java.io.File;
 import java.io.IOException;
@@ -68,18 +68,16 @@ public class TestAvroKeyRecordReader {
     Configuration conf = new Configuration();
 
     // Create a mock input split for this record reader.
-    FileSplit inputSplit = createMock(FileSplit.class);
-    expect(inputSplit.getPath()).andReturn(new Path("/path/to/an/avro/file")).anyTimes();
-    expect(inputSplit.getStart()).andReturn(0L).anyTimes();
-    expect(inputSplit.getLength()).andReturn(avroFileInput.length()).anyTimes();
+    FileSplit inputSplit = mock(FileSplit.class);
+    when(inputSplit.getPath()).thenReturn(new Path("/path/to/an/avro/file"));
+    when(inputSplit.getStart()).thenReturn(0L);
+    when(inputSplit.getLength()).thenReturn(avroFileInput.length());
 
     // Create a mock task attempt context for this record reader.
-    TaskAttemptContext context = createMock(TaskAttemptContext.class);
-    expect(context.getConfiguration()).andReturn(conf).anyTimes();
+    TaskAttemptContext context = mock(TaskAttemptContext.class);
+    when(context.getConfiguration()).thenReturn(conf);
 
     // Initialize the record reader.
-    replay(inputSplit);
-    replay(context);
     recordReader.initialize(inputSplit, context);
 
     assertEquals("Progress should be zero before any records are read", 0.0f, recordReader.getProgress(), 0.0f);
@@ -123,7 +121,9 @@ public class TestAvroKeyRecordReader {
     recordReader.close();
 
     // Verify the expected calls on the mocks.
-    verify(inputSplit);
-    verify(context);
+    verify(inputSplit).getPath();
+    verify(inputSplit, times(2)).getStart();
+    verify(inputSplit).getLength();
+    verify(context, atLeastOnce()).getConfiguration();
   }
 }
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordWriter.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordWriter.java
index d338d07..c91cb0b 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordWriter.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordWriter.java
@@ -18,12 +18,10 @@
 
 package org.apache.avro.mapreduce;
 
-import static org.easymock.EasyMock.createMock;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.verify;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.*;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -55,9 +53,7 @@ public class TestAvroKeyRecordWriter {
     GenericData dataModel = new ReflectData();
     CodecFactory compressionCodec = CodecFactory.nullCodec();
     ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
-    TaskAttemptContext context = createMock(TaskAttemptContext.class);
-
-    replay(context);
+    TaskAttemptContext context = mock(TaskAttemptContext.class);
 
     // Write an avro container file with two records: 1 and 2.
     AvroKeyRecordWriter<Integer> recordWriter = new AvroKeyRecordWriter<>(writerSchema, dataModel, compressionCodec,
@@ -66,8 +62,6 @@ public class TestAvroKeyRecordWriter {
     recordWriter.write(new AvroKey<>(2), NullWritable.get());
     recordWriter.close(context);
 
-    verify(context);
-
     // Verify that the file was written as expected.
     InputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());
     Schema readerSchema = Schema.create(Schema.Type.INT);
@@ -81,6 +75,8 @@ public class TestAvroKeyRecordWriter {
     assertFalse(dataFileReader.hasNext()); // No more records.
 
     dataFileReader.close();
+
+    verify(context, never()).getConfiguration();
   }
 
   @Test
@@ -89,9 +85,7 @@ public class TestAvroKeyRecordWriter {
     GenericData dataModel = new ReflectData();
     CodecFactory compressionCodec = CodecFactory.nullCodec();
     FileOutputStream outputStream = new FileOutputStream(new File("target/temp.avro"));
-    TaskAttemptContext context = createMock(TaskAttemptContext.class);
-
-    replay(context);
+    TaskAttemptContext context = mock(TaskAttemptContext.class);
 
     // Write an avro container file with two records: 1 and 2.
     AvroKeyRecordWriter<Integer> recordWriter = new AvroKeyRecordWriter<>(writerSchema, dataModel, compressionCodec,
@@ -102,8 +96,6 @@ public class TestAvroKeyRecordWriter {
     recordWriter.write(new AvroKey<>(2), NullWritable.get());
     recordWriter.close(context);
 
-    verify(context);
-
     // Verify that the file was written as expected.
     Configuration conf = new Configuration();
     conf.set("fs.default.name", "file:///");
@@ -120,5 +112,7 @@ public class TestAvroKeyRecordWriter {
     assertEquals(1, dataFileReader.next());
 
     dataFileReader.close();
+
+    verify(context, never()).getConfiguration();
   }
 }
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordReader.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordReader.java
index e35a3eb..db4e026 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordReader.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordReader.java
@@ -18,8 +18,8 @@
 
 package org.apache.avro.mapreduce;
 
-import static org.easymock.EasyMock.*;
 import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
 
 import java.io.File;
 import java.io.IOException;
@@ -80,18 +80,16 @@ public class TestAvroKeyValueRecordReader {
     Configuration conf = new Configuration();
 
     // Create a mock input split for this record reader.
-    FileSplit inputSplit = createMock(FileSplit.class);
-    expect(inputSplit.getPath()).andReturn(new Path("/path/to/an/avro/file")).anyTimes();
-    expect(inputSplit.getStart()).andReturn(0L).anyTimes();
-    expect(inputSplit.getLength()).andReturn(avroFileInput.length()).anyTimes();
+    FileSplit inputSplit = mock(FileSplit.class);
+    when(inputSplit.getPath()).thenReturn(new Path("/path/to/an/avro/file"));
+    when(inputSplit.getStart()).thenReturn(0L);
+    when(inputSplit.getLength()).thenReturn(avroFileInput.length());
 
     // Create a mock task attempt context for this record reader.
-    TaskAttemptContext context = createMock(TaskAttemptContext.class);
-    expect(context.getConfiguration()).andReturn(conf).anyTimes();
+    TaskAttemptContext context = mock(TaskAttemptContext.class);
+    when(context.getConfiguration()).thenReturn(conf);
 
     // Initialize the record reader.
-    replay(inputSplit);
-    replay(context);
     recordReader.initialize(inputSplit, context);
 
     assertEquals("Progress should be zero before any records are read", 0.0f, recordReader.getProgress(), 0.0f);
@@ -135,7 +133,9 @@ public class TestAvroKeyValueRecordReader {
     recordReader.close();
 
     // Verify the expected calls on the mocks.
-    verify(inputSplit);
-    verify(context);
+    verify(inputSplit).getPath();
+    verify(inputSplit, times(2)).getStart();
+    verify(inputSplit).getLength();
+    verify(context, atLeastOnce()).getConfiguration();
   }
 }
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
index af2d4df..059df7e 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
@@ -18,13 +18,11 @@
 
 package org.apache.avro.mapreduce;
 
-import static org.easymock.EasyMock.createMock;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.verify;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.*;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -59,9 +57,7 @@ public class TestAvroKeyValueRecordWriter {
   public void testWriteRecords() throws IOException {
     Job job = Job.getInstance();
     AvroJob.setOutputValueSchema(job, TextStats.SCHEMA$);
-    TaskAttemptContext context = createMock(TaskAttemptContext.class);
-
-    replay(context);
+    TaskAttemptContext context = mock(TaskAttemptContext.class);
 
     AvroDatumConverterFactory factory = new AvroDatumConverterFactory(job.getConfiguration());
     AvroDatumConverter<Text, ?> keyConverter = factory.create(Text.class);
@@ -85,8 +81,6 @@ public class TestAvroKeyValueRecordWriter {
     writer.write(new Text("banana"), new AvroValue<>(bananaStats));
     writer.close(context);
 
-    verify(context);
-
     ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());
     Schema readerSchema = AvroKeyValue.getSchema(Schema.create(Schema.Type.STRING), TextStats.SCHEMA$);
     DatumReader<GenericRecord> datumReader = new SpecificDatumReader<>(readerSchema);
@@ -109,6 +103,7 @@ public class TestAvroKeyValueRecordWriter {
     // That's all, folks.
     assertFalse(avroFileReader.hasNext());
     avroFileReader.close();
+    verify(context, never()).getConfiguration();
   }
 
   public static class R1 {
@@ -120,8 +115,7 @@ public class TestAvroKeyValueRecordWriter {
     Job job = Job.getInstance();
     Schema schema = ReflectData.get().getSchema(R1.class);
     AvroJob.setOutputValueSchema(job, schema);
-    TaskAttemptContext context = createMock(TaskAttemptContext.class);
-    replay(context);
+    TaskAttemptContext context = mock(TaskAttemptContext.class);
 
     R1 record = new R1();
     record.attribute = "test";
@@ -141,8 +135,6 @@ public class TestAvroKeyValueRecordWriter {
     writer.write(new Text("reflectionData"), avroValue);
     writer.close(context);
 
-    verify(context);
-
     ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());
     Schema readerSchema = AvroKeyValue.getSchema(Schema.create(Schema.Type.STRING), schema);
     DatumReader<GenericRecord> datumReader = new ReflectDatumReader<>(readerSchema);
@@ -157,15 +149,14 @@ public class TestAvroKeyValueRecordWriter {
     assertEquals("reflectionData", firstRecord.getKey().toString());
     assertEquals(record.attribute, firstRecord.getValue().attribute);
     avroFileReader.close();
+    verify(context, never()).getConfiguration();
   }
 
   @Test
   public void testSyncableWriteRecords() throws IOException {
     Job job = Job.getInstance();
     AvroJob.setOutputValueSchema(job, TextStats.SCHEMA$);
-    TaskAttemptContext context = createMock(TaskAttemptContext.class);
-
-    replay(context);
+    TaskAttemptContext context = mock(TaskAttemptContext.class);
 
     AvroDatumConverterFactory factory = new AvroDatumConverterFactory(job.getConfiguration());
     AvroDatumConverter<Text, ?> keyConverter = factory.create(Text.class);
@@ -190,8 +181,6 @@ public class TestAvroKeyValueRecordWriter {
     writer.write(new Text("banana"), new AvroValue<>(bananaStats));
     writer.close(context);
 
-    verify(context);
-
     Configuration conf = new Configuration();
     conf.set("fs.default.name", "file:///");
     Path avroFile = new Path("target/temp.avro");
@@ -216,5 +205,6 @@ public class TestAvroKeyValueRecordWriter {
 
     // That's all, folks.
     avroFileReader.close();
+    verify(context, never()).getConfiguration();
   }
 }
diff --git a/lang/java/pom.xml b/lang/java/pom.xml
index 1f103c6..b522806 100644
--- a/lang/java/pom.xml
+++ b/lang/java/pom.xml
@@ -55,7 +55,7 @@
     <commons-compress.version>1.21</commons-compress.version>
     <commons-lang.version>3.12.0</commons-lang.version>
     <tukaani.version>1.9</tukaani.version>
-    <easymock.version>4.3</easymock.version>
+    <mockito.version>4.2.0</mockito.version>
     <hamcrest.version>2.2</hamcrest.version>
     <grpc.version>1.43.1</grpc.version>
     <zstd-jni.version>1.5.1-1</zstd-jni.version>
@@ -289,9 +289,10 @@
               <!-- Avro uses Sun's java code style conventions with 2 spaces, this is a modified version of
                    the eclipse formatter -->
               <file>${main.basedir}/lang/java/eclipse-java-formatter.xml</file>
-              <version>4.19.0</version>
+              <version>4.21.0</version>
             </eclipse>
-            <removeUnusedImports/>
+            <!-- Temporarily disabled for JDK 16+ builds -->
+            <!--<removeUnusedImports/>-->
             <replaceRegex>
               <name>Remove wildcard imports</name>
               <searchRegex>import\s+[^\*\s]+\*;(\r\n|\r|\n)</searchRegex>
@@ -522,9 +523,9 @@
         </exclusions>
       </dependency>
       <dependency>
-        <groupId>org.easymock</groupId>
-        <artifactId>easymock</artifactId>
-        <version>${easymock.version}</version>
+        <groupId>org.mockito</groupId>
+        <artifactId>mockito-core</artifactId>
+        <version>${mockito.version}</version>
       </dependency>
       <dependency>
         <groupId>org.hamcrest</groupId>