You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@avro.apache.org by mg...@apache.org on 2022/01/14 21:15:34 UTC

[avro] branch branch-1.11 updated: AVRO-3120: Support Next Java LTS (Java 17) (#1454)

This is an automated email from the ASF dual-hosted git repository.

mgrigorov pushed a commit to branch branch-1.11
in repository https://gitbox.apache.org/repos/asf/avro.git


The following commit(s) were added to refs/heads/branch-1.11 by this push:
     new 4a7ba01  AVRO-3120: Support Next Java LTS (Java 17) (#1454)
4a7ba01 is described below

commit 4a7ba0169c7096488f302cf95b4bbb9a3f3452ef
Author: Martin Grigorov <ma...@users.noreply.github.com>
AuthorDate: Thu Jan 13 13:32:29 2022 +0200

    AVRO-3120: Support Next Java LTS (Java 17) (#1454)
    
    * AVRO-3120: Support Next Java LTS (Java 17)
    
    Disable Spotless' removeUnusedImports
    (https://github.com/diffplug/spotless/issues/834). It will be re-enabled
    once Avro is updated to JDK 11+. Then we can add .mvn/jvm.config (see
    https://github.com/diffplug/spotless/issues/834#issuecomment-817524058)
    
    Replace Easymock with Mockito.
    
    Signed-off-by: Martin Tzvetanov Grigorov <mg...@apache.org>
    
    * AVRO-3120: Downgrade eclipse-java-formatter to the version that supports JDK 8
    
    Signed-off-by: Martin Tzvetanov Grigorov <mg...@apache.org>
    
    * AVRO-3120: Fix formatting for Spotless
    
    Signed-off-by: Martin Tzvetanov Grigorov <mg...@apache.org>
    
    * AVRO-3120: Add JDK 17 for the interop tests
    
    Signed-off-by: Martin Tzvetanov Grigorov <mg...@apache.org>
    
    * AVRO-3120: Install JDK 17 in the Docker image
    
    Signed-off-by: Martin Tzvetanov Grigorov <mg...@apache.org>
    (cherry picked from commit 3a8423fc48417e581d4423d2121cf16154e2dfaf)
---
 .github/workflows/test-lang-java.yml               |  2 +
 lang/java/mapred/pom.xml                           |  4 +-
 .../avro/mapreduce/TestAvroKeyInputFormat.java     | 14 +++----
 .../avro/mapreduce/TestAvroKeyOutputFormat.java    | 49 ++++++++++------------
 .../avro/mapreduce/TestAvroKeyRecordReader.java    | 22 +++++-----
 .../avro/mapreduce/TestAvroKeyRecordWriter.java    | 20 ++++-----
 .../mapreduce/TestAvroKeyValueRecordReader.java    | 22 +++++-----
 .../mapreduce/TestAvroKeyValueRecordWriter.java    | 24 ++++-------
 lang/java/pom.xml                                  | 11 ++---
 share/docker/Dockerfile                            |  2 +
 10 files changed, 75 insertions(+), 95 deletions(-)

diff --git a/.github/workflows/test-lang-java.yml b/.github/workflows/test-lang-java.yml
index b76b9c3..91a01f3 100644
--- a/.github/workflows/test-lang-java.yml
+++ b/.github/workflows/test-lang-java.yml
@@ -38,6 +38,7 @@ jobs:
         java:
         - '8'
         - '11'
+        - '17'
     steps:
       - uses: actions/checkout@v2
 
@@ -69,6 +70,7 @@ jobs:
         java:
         - '8'
         - '11'
+        - '17'
     steps:
       - uses: actions/checkout@v2
 
diff --git a/lang/java/mapred/pom.xml b/lang/java/mapred/pom.xml
index 5b8031d..463bd2e 100644
--- a/lang/java/mapred/pom.xml
+++ b/lang/java/mapred/pom.xml
@@ -194,8 +194,8 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.easymock</groupId>
-      <artifactId>easymock</artifactId>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyInputFormat.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyInputFormat.java
index f46606c..6f10f5c 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyInputFormat.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyInputFormat.java
@@ -19,7 +19,7 @@
 package org.apache.avro.mapreduce;
 
 import static org.junit.Assert.*;
-import static org.easymock.EasyMock.*;
+import static org.mockito.Mockito.*;
 
 import java.io.IOException;
 
@@ -45,12 +45,9 @@ public class TestAvroKeyInputFormat {
     AvroJob.setInputKeySchema(job, Schema.create(Schema.Type.STRING));
     Configuration conf = job.getConfiguration();
 
-    FileSplit inputSplit = createMock(FileSplit.class);
-    TaskAttemptContext context = createMock(TaskAttemptContext.class);
-    expect(context.getConfiguration()).andReturn(conf).anyTimes();
-
-    replay(inputSplit);
-    replay(context);
+    FileSplit inputSplit = mock(FileSplit.class);
+    TaskAttemptContext context = mock(TaskAttemptContext.class);
+    when(context.getConfiguration()).thenReturn(conf);
 
     AvroKeyInputFormat inputFormat = new AvroKeyInputFormat();
     @SuppressWarnings("unchecked")
@@ -58,7 +55,6 @@ public class TestAvroKeyInputFormat {
     assertNotNull(inputFormat);
     recordReader.close();
 
-    verify(inputSplit);
-    verify(context);
+    verify(context).getConfiguration();
   }
 }
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyOutputFormat.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyOutputFormat.java
index 46784db..440dfec 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyOutputFormat.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyOutputFormat.java
@@ -18,8 +18,8 @@
 
 package org.apache.avro.mapreduce;
 
-import static org.easymock.EasyMock.*;
 import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
 
 import java.io.IOException;
 import java.io.OutputStream;
@@ -29,16 +29,17 @@ import org.apache.avro.file.CodecFactory;
 import org.apache.avro.file.DataFileConstants;
 import org.apache.avro.generic.GenericData;
 import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.reflect.ReflectData;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.easymock.Capture;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
+import org.mockito.ArgumentCaptor;
 
 public class TestAvroKeyOutputFormat {
   private static final String SYNC_INTERVAL_KEY = org.apache.avro.mapred.AvroOutputFormat.SYNC_INTERVAL_KEY;
@@ -125,41 +126,35 @@ public class TestAvroKeyOutputFormat {
     job.getConfiguration().set("mapred.output.dir", mTempDir.getRoot().getPath());
     Schema writerSchema = Schema.create(Schema.Type.INT);
     AvroJob.setOutputKeySchema(job, writerSchema);
-    TaskAttemptContext context = createMock(TaskAttemptContext.class);
-    expect(context.getConfiguration()).andReturn(job.getConfiguration()).anyTimes();
-    expect(context.getTaskAttemptID()).andReturn(TaskAttemptID.forName("attempt_200707121733_0001_m_000000_0"))
-        .anyTimes();
-    expect(context.getNumReduceTasks()).andReturn(1);
+    TaskAttemptContext context = mock(TaskAttemptContext.class);
+    when(context.getConfiguration()).thenReturn(job.getConfiguration());
+    when(context.getTaskAttemptID()).thenReturn(TaskAttemptID.forName("attempt_200707121733_0001_m_000000_0"));
+    when(context.getNumReduceTasks()).thenReturn(1);
 
     // Create a mock record writer.
     @SuppressWarnings("unchecked")
-    RecordWriter<AvroKey<Integer>, NullWritable> expectedRecordWriter = createMock(RecordWriter.class);
-    AvroKeyOutputFormat.RecordWriterFactory recordWriterFactory = createMock(
-        AvroKeyOutputFormat.RecordWriterFactory.class);
-
-    // Expect the record writer factory to be called with appropriate parameters.
-    Capture<CodecFactory> capturedCodecFactory = Capture.newInstance();
-    expect(recordWriterFactory.create(eq(writerSchema), anyObject(GenericData.class), capture(capturedCodecFactory), // Capture
-                                                                                                                     // for
-                                                                                                                     // comparison
-                                                                                                                     // later.
-        anyObject(OutputStream.class), eq(expectedSyncInterval))).andReturn(expectedRecordWriter);
-
-    replay(context);
-    replay(expectedRecordWriter);
-    replay(recordWriterFactory);
+    RecordWriter<AvroKey<Integer>, NullWritable> expectedRecordWriter = mock(RecordWriter.class);
+    AvroKeyOutputFormat.RecordWriterFactory recordWriterFactory = mock(AvroKeyOutputFormat.RecordWriterFactory.class);
+
+    // Expect the record writer factory to be called with appropriate parameters.
+    ArgumentCaptor<CodecFactory> capturedCodecFactory = ArgumentCaptor.forClass(CodecFactory.class);
+    when(recordWriterFactory.create(eq(writerSchema), any(GenericData.class), capturedCodecFactory.capture(), // Capture
+                                                                                                              // for
+                                                                                                              // comparison
+                                                                                                              // later.
+        any(OutputStream.class), eq(expectedSyncInterval))).thenReturn(expectedRecordWriter);
 
     AvroKeyOutputFormat<Integer> outputFormat = new AvroKeyOutputFormat<>(recordWriterFactory);
     RecordWriter<AvroKey<Integer>, NullWritable> recordWriter = outputFormat.getRecordWriter(context);
     // Make sure the expected codec was used.
-    assertTrue(capturedCodecFactory.hasCaptured());
+    assertNotNull(capturedCodecFactory.getValue());
     assertEquals(expectedCodec.toString(), capturedCodecFactory.getValue().toString());
 
-    verify(context);
-    verify(expectedRecordWriter);
-    verify(recordWriterFactory);
+    verify(context, atLeastOnce()).getConfiguration();
+    verify(recordWriterFactory).create(eq(writerSchema), any(ReflectData.class), any(CodecFactory.class),
+        any(OutputStream.class), anyInt());
 
     assertNotNull(recordWriter);
-    assertTrue(expectedRecordWriter == recordWriter);
+    assertSame(expectedRecordWriter, recordWriter);
   }
 }
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordReader.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordReader.java
index 2940cb3..64ff39a 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordReader.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordReader.java
@@ -18,8 +18,8 @@
 
 package org.apache.avro.mapreduce;
 
-import static org.easymock.EasyMock.*;
 import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
 
 import java.io.File;
 import java.io.IOException;
@@ -68,18 +68,16 @@ public class TestAvroKeyRecordReader {
     Configuration conf = new Configuration();
 
     // Create a mock input split for this record reader.
-    FileSplit inputSplit = createMock(FileSplit.class);
-    expect(inputSplit.getPath()).andReturn(new Path("/path/to/an/avro/file")).anyTimes();
-    expect(inputSplit.getStart()).andReturn(0L).anyTimes();
-    expect(inputSplit.getLength()).andReturn(avroFileInput.length()).anyTimes();
+    FileSplit inputSplit = mock(FileSplit.class);
+    when(inputSplit.getPath()).thenReturn(new Path("/path/to/an/avro/file"));
+    when(inputSplit.getStart()).thenReturn(0L);
+    when(inputSplit.getLength()).thenReturn(avroFileInput.length());
 
     // Create a mock task attempt context for this record reader.
-    TaskAttemptContext context = createMock(TaskAttemptContext.class);
-    expect(context.getConfiguration()).andReturn(conf).anyTimes();
+    TaskAttemptContext context = mock(TaskAttemptContext.class);
+    when(context.getConfiguration()).thenReturn(conf);
 
     // Initialize the record reader.
-    replay(inputSplit);
-    replay(context);
     recordReader.initialize(inputSplit, context);
 
     assertEquals("Progress should be zero before any records are read", 0.0f, recordReader.getProgress(), 0.0f);
@@ -123,7 +121,9 @@ public class TestAvroKeyRecordReader {
     recordReader.close();
 
     // Verify the expected calls on the mocks.
-    verify(inputSplit);
-    verify(context);
+    verify(inputSplit).getPath();
+    verify(inputSplit, times(2)).getStart();
+    verify(inputSplit).getLength();
+    verify(context, atLeastOnce()).getConfiguration();
   }
 }
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordWriter.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordWriter.java
index d338d07..c91cb0b 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordWriter.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordWriter.java
@@ -18,12 +18,10 @@
 
 package org.apache.avro.mapreduce;
 
-import static org.easymock.EasyMock.createMock;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.verify;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.*;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -55,9 +53,7 @@ public class TestAvroKeyRecordWriter {
     GenericData dataModel = new ReflectData();
     CodecFactory compressionCodec = CodecFactory.nullCodec();
     ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
-    TaskAttemptContext context = createMock(TaskAttemptContext.class);
-
-    replay(context);
+    TaskAttemptContext context = mock(TaskAttemptContext.class);
 
     // Write an avro container file with two records: 1 and 2.
     AvroKeyRecordWriter<Integer> recordWriter = new AvroKeyRecordWriter<>(writerSchema, dataModel, compressionCodec,
@@ -66,8 +62,6 @@ public class TestAvroKeyRecordWriter {
     recordWriter.write(new AvroKey<>(2), NullWritable.get());
     recordWriter.close(context);
 
-    verify(context);
-
     // Verify that the file was written as expected.
     InputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());
     Schema readerSchema = Schema.create(Schema.Type.INT);
@@ -81,6 +75,8 @@ public class TestAvroKeyRecordWriter {
     assertFalse(dataFileReader.hasNext()); // No more records.
 
     dataFileReader.close();
+
+    verify(context, never()).getConfiguration();
   }
 
   @Test
@@ -89,9 +85,7 @@ public class TestAvroKeyRecordWriter {
     GenericData dataModel = new ReflectData();
     CodecFactory compressionCodec = CodecFactory.nullCodec();
     FileOutputStream outputStream = new FileOutputStream(new File("target/temp.avro"));
-    TaskAttemptContext context = createMock(TaskAttemptContext.class);
-
-    replay(context);
+    TaskAttemptContext context = mock(TaskAttemptContext.class);
 
     // Write an avro container file with two records: 1 and 2.
     AvroKeyRecordWriter<Integer> recordWriter = new AvroKeyRecordWriter<>(writerSchema, dataModel, compressionCodec,
@@ -102,8 +96,6 @@ public class TestAvroKeyRecordWriter {
     recordWriter.write(new AvroKey<>(2), NullWritable.get());
     recordWriter.close(context);
 
-    verify(context);
-
     // Verify that the file was written as expected.
     Configuration conf = new Configuration();
     conf.set("fs.default.name", "file:///");
@@ -120,5 +112,7 @@ public class TestAvroKeyRecordWriter {
     assertEquals(1, dataFileReader.next());
 
     dataFileReader.close();
+
+    verify(context, never()).getConfiguration();
   }
 }
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordReader.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordReader.java
index e35a3eb..db4e026 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordReader.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordReader.java
@@ -18,8 +18,8 @@
 
 package org.apache.avro.mapreduce;
 
-import static org.easymock.EasyMock.*;
 import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
 
 import java.io.File;
 import java.io.IOException;
@@ -80,18 +80,16 @@ public class TestAvroKeyValueRecordReader {
     Configuration conf = new Configuration();
 
     // Create a mock input split for this record reader.
-    FileSplit inputSplit = createMock(FileSplit.class);
-    expect(inputSplit.getPath()).andReturn(new Path("/path/to/an/avro/file")).anyTimes();
-    expect(inputSplit.getStart()).andReturn(0L).anyTimes();
-    expect(inputSplit.getLength()).andReturn(avroFileInput.length()).anyTimes();
+    FileSplit inputSplit = mock(FileSplit.class);
+    when(inputSplit.getPath()).thenReturn(new Path("/path/to/an/avro/file"));
+    when(inputSplit.getStart()).thenReturn(0L);
+    when(inputSplit.getLength()).thenReturn(avroFileInput.length());
 
     // Create a mock task attempt context for this record reader.
-    TaskAttemptContext context = createMock(TaskAttemptContext.class);
-    expect(context.getConfiguration()).andReturn(conf).anyTimes();
+    TaskAttemptContext context = mock(TaskAttemptContext.class);
+    when(context.getConfiguration()).thenReturn(conf);
 
     // Initialize the record reader.
-    replay(inputSplit);
-    replay(context);
     recordReader.initialize(inputSplit, context);
 
     assertEquals("Progress should be zero before any records are read", 0.0f, recordReader.getProgress(), 0.0f);
@@ -135,7 +133,9 @@ public class TestAvroKeyValueRecordReader {
     recordReader.close();
 
     // Verify the expected calls on the mocks.
-    verify(inputSplit);
-    verify(context);
+    verify(inputSplit).getPath();
+    verify(inputSplit, times(2)).getStart();
+    verify(inputSplit).getLength();
+    verify(context, atLeastOnce()).getConfiguration();
   }
 }
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
index af2d4df..059df7e 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
@@ -18,13 +18,11 @@
 
 package org.apache.avro.mapreduce;
 
-import static org.easymock.EasyMock.createMock;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.verify;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.*;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -59,9 +57,7 @@ public class TestAvroKeyValueRecordWriter {
   public void testWriteRecords() throws IOException {
     Job job = Job.getInstance();
     AvroJob.setOutputValueSchema(job, TextStats.SCHEMA$);
-    TaskAttemptContext context = createMock(TaskAttemptContext.class);
-
-    replay(context);
+    TaskAttemptContext context = mock(TaskAttemptContext.class);
 
     AvroDatumConverterFactory factory = new AvroDatumConverterFactory(job.getConfiguration());
     AvroDatumConverter<Text, ?> keyConverter = factory.create(Text.class);
@@ -85,8 +81,6 @@ public class TestAvroKeyValueRecordWriter {
     writer.write(new Text("banana"), new AvroValue<>(bananaStats));
     writer.close(context);
 
-    verify(context);
-
     ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());
     Schema readerSchema = AvroKeyValue.getSchema(Schema.create(Schema.Type.STRING), TextStats.SCHEMA$);
     DatumReader<GenericRecord> datumReader = new SpecificDatumReader<>(readerSchema);
@@ -109,6 +103,7 @@ public class TestAvroKeyValueRecordWriter {
     // That's all, folks.
     assertFalse(avroFileReader.hasNext());
     avroFileReader.close();
+    verify(context, never()).getConfiguration();
   }
 
   public static class R1 {
@@ -120,8 +115,7 @@ public class TestAvroKeyValueRecordWriter {
     Job job = Job.getInstance();
     Schema schema = ReflectData.get().getSchema(R1.class);
     AvroJob.setOutputValueSchema(job, schema);
-    TaskAttemptContext context = createMock(TaskAttemptContext.class);
-    replay(context);
+    TaskAttemptContext context = mock(TaskAttemptContext.class);
 
     R1 record = new R1();
     record.attribute = "test";
@@ -141,8 +135,6 @@ public class TestAvroKeyValueRecordWriter {
     writer.write(new Text("reflectionData"), avroValue);
     writer.close(context);
 
-    verify(context);
-
     ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());
     Schema readerSchema = AvroKeyValue.getSchema(Schema.create(Schema.Type.STRING), schema);
     DatumReader<GenericRecord> datumReader = new ReflectDatumReader<>(readerSchema);
@@ -157,15 +149,14 @@ public class TestAvroKeyValueRecordWriter {
     assertEquals("reflectionData", firstRecord.getKey().toString());
     assertEquals(record.attribute, firstRecord.getValue().attribute);
     avroFileReader.close();
+    verify(context, never()).getConfiguration();
   }
 
   @Test
   public void testSyncableWriteRecords() throws IOException {
     Job job = Job.getInstance();
     AvroJob.setOutputValueSchema(job, TextStats.SCHEMA$);
-    TaskAttemptContext context = createMock(TaskAttemptContext.class);
-
-    replay(context);
+    TaskAttemptContext context = mock(TaskAttemptContext.class);
 
     AvroDatumConverterFactory factory = new AvroDatumConverterFactory(job.getConfiguration());
     AvroDatumConverter<Text, ?> keyConverter = factory.create(Text.class);
@@ -190,8 +181,6 @@ public class TestAvroKeyValueRecordWriter {
     writer.write(new Text("banana"), new AvroValue<>(bananaStats));
     writer.close(context);
 
-    verify(context);
-
     Configuration conf = new Configuration();
     conf.set("fs.default.name", "file:///");
     Path avroFile = new Path("target/temp.avro");
@@ -216,5 +205,6 @@ public class TestAvroKeyValueRecordWriter {
 
     // That's all, folks.
     avroFileReader.close();
+    verify(context, never()).getConfiguration();
   }
 }
diff --git a/lang/java/pom.xml b/lang/java/pom.xml
index 2646f10..20fff49 100644
--- a/lang/java/pom.xml
+++ b/lang/java/pom.xml
@@ -55,7 +55,7 @@
     <commons-compress.version>1.21</commons-compress.version>
     <commons-lang.version>3.12.0</commons-lang.version>
     <tukaani.version>1.9</tukaani.version>
-    <easymock.version>4.3</easymock.version>
+    <mockito.version>4.2.0</mockito.version>
     <hamcrest.version>2.2</hamcrest.version>
     <grpc.version>1.43.1</grpc.version>
     <zstd-jni.version>1.5.1-1</zstd-jni.version>
@@ -291,7 +291,8 @@
               <file>${main.basedir}/lang/java/eclipse-java-formatter.xml</file>
               <version>4.19.0</version>
             </eclipse>
-            <removeUnusedImports/>
+            <!-- Temporarily disabled for JDK 16+ builds -->
+            <!--<removeUnusedImports/>-->
             <replaceRegex>
               <name>Remove wildcard imports</name>
               <searchRegex>import\s+[^\*\s]+\*;(\r\n|\r|\n)</searchRegex>
@@ -522,9 +523,9 @@
         </exclusions>
       </dependency>
       <dependency>
-        <groupId>org.easymock</groupId>
-        <artifactId>easymock</artifactId>
-        <version>${easymock.version}</version>
+        <groupId>org.mockito</groupId>
+        <artifactId>mockito-core</artifactId>
+        <version>${mockito.version}</version>
       </dependency>
       <dependency>
         <groupId>org.hamcrest</groupId>
diff --git a/share/docker/Dockerfile b/share/docker/Dockerfile
index a2526f0..aff264b 100644
--- a/share/docker/Dockerfile
+++ b/share/docker/Dockerfile
@@ -52,6 +52,7 @@ RUN apt-get -qqy update \
                                                  libssl-dev \
                                                  make \
                                                  mypy \
+                                                 openjdk-17-jdk \
                                                  openjdk-11-jdk \
                                                  openjdk-8-jdk \
                                                  perl \
@@ -191,6 +192,7 @@ RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --de
-# Note: This "ubertool" container has two JDK versions:
+# Note: This "ubertool" container has three JDK versions:
 # - OpenJDK 8
 # - OpenJDK 11
+# - OpenJDK 17
 # - The root build.sh script switches between the versions according to
 #   the JAVA environment variable.