You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@flink.apache.org by al...@apache.org on 2017/08/25 13:18:08 UTC

[1/2] flink git commit: [FLINK-7429] [kinesis] Add IT tests for migration from 1.2 / 1.3

Repository: flink
Updated Branches:
  refs/heads/release-1.3 93e9dba36 -> 6876abd37


[FLINK-7429] [kinesis] Add IT tests for migration from 1.2 / 1.3


Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/8dc5fd03
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/8dc5fd03
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/8dc5fd03

Branch: refs/heads/release-1.3
Commit: 8dc5fd0313644e9f5a73f41fbc3fb6fc677ce731
Parents: 93e9dba
Author: Tzu-Li (Gordon) Tai <tz...@apache.org>
Authored: Fri Aug 18 11:27:38 2017 +0800
Committer: Aljoscha Krettek <al...@gmail.com>
Committed: Thu Aug 24 11:21:05 2017 +0200

----------------------------------------------------------------------
 .../FlinkKinesisConsumerMigrationTest.java      | 204 +++++++++++++------
 ...sumer-migration-test-flink1.2-empty-snapshot | Bin 0 -> 146 bytes
 ...is-consumer-migration-test-flink1.2-snapshot | Bin 0 -> 818 bytes
 ...sumer-migration-test-flink1.3-empty-snapshot | Bin 0 -> 13975 bytes
 ...is-consumer-migration-test-flink1.3-snapshot | Bin 0 -> 14043 bytes
 5 files changed, 147 insertions(+), 57 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/8dc5fd03/flink-connectors/flink-connector-kinesis/src/test/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumerMigrationTest.java
----------------------------------------------------------------------
diff --git a/flink-connectors/flink-connector-kinesis/src/test/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumerMigrationTest.java b/flink-connectors/flink-connector-kinesis/src/test/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumerMigrationTest.java
index e24a411..3e43f13 100644
--- a/flink-connectors/flink-connector-kinesis/src/test/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumerMigrationTest.java
+++ b/flink-connectors/flink-connector-kinesis/src/test/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumerMigrationTest.java
@@ -17,57 +17,104 @@
 
 package org.apache.flink.streaming.connectors.kinesis;
 
-import com.amazonaws.services.kinesis.model.Shard;
 import org.apache.flink.api.common.functions.RuntimeContext;
+import org.apache.flink.core.testutils.OneShotLatch;
 import org.apache.flink.streaming.api.TimeCharacteristic;
 import org.apache.flink.streaming.api.functions.source.SourceFunction;
 import org.apache.flink.streaming.api.operators.StreamSource;
-import org.apache.flink.streaming.connectors.kinesis.config.ConsumerConfigConstants;
+import org.apache.flink.streaming.connectors.kinesis.config.AWSConfigConstants;
 import org.apache.flink.streaming.connectors.kinesis.internals.KinesisDataFetcher;
-import org.apache.flink.streaming.connectors.kinesis.model.StreamShardMetadata;
-import org.apache.flink.streaming.connectors.kinesis.model.KinesisStreamShard;
 import org.apache.flink.streaming.connectors.kinesis.model.SequenceNumber;
+import org.apache.flink.streaming.connectors.kinesis.model.StreamShardMetadata;
 import org.apache.flink.streaming.connectors.kinesis.serialization.KinesisDeserializationSchema;
 import org.apache.flink.streaming.connectors.kinesis.testutils.KinesisShardIdGenerator;
+import org.apache.flink.streaming.runtime.tasks.OperatorStateHandles;
 import org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness;
+import org.apache.flink.streaming.util.OperatorSnapshotUtil;
+import org.apache.flink.streaming.util.migration.MigrationTestUtil;
+import org.apache.flink.streaming.util.migration.MigrationVersion;
+
+import org.junit.Ignore;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
 
-import java.net.URL;
+import java.util.Arrays;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Properties;
+import java.util.concurrent.atomic.AtomicReference;
 
-import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 /**
  * Tests for checking whether {@link FlinkKinesisConsumer} can restore from snapshots that were
  * done using the Flink 1.1 {@code FlinkKinesisConsumer}.
+ *
+ * <p>For regenerating the binary snapshot files run {@link #writeSnapshot()} on the corresponding
+ * Flink release-* branch.
  */
+@RunWith(Parameterized.class)
 public class FlinkKinesisConsumerMigrationTest {
 
+	/**
+	 * TODO change this to the corresponding savepoint version to be written (e.g. {@link MigrationVersion#v1_3} for 1.3)
+	 * TODO and remove all @Ignore annotations on the writeSnapshot() method to generate savepoints
+	 */
+	private final MigrationVersion flinkGenerateSavepointVersion = null;
+
+	private final static HashMap<StreamShardMetadata, SequenceNumber> TEST_STATE = new HashMap<>();
+	static {
+		StreamShardMetadata shardMetadata = new StreamShardMetadata();
+		shardMetadata.setStreamName("fakeStream1");
+		shardMetadata.setShardId(KinesisShardIdGenerator.generateFromShardOrder(0));
+
+		TEST_STATE.put(shardMetadata, new SequenceNumber("987654321"));
+	}
+
+	private final MigrationVersion testMigrateVersion;
+
+	@Parameterized.Parameters(name = "Migration Savepoint: {0}")
+	public static Collection<MigrationVersion> parameters () {
+		return Arrays.asList(MigrationVersion.v1_1, MigrationVersion.v1_2, MigrationVersion.v1_3);
+	}
+
+	public FlinkKinesisConsumerMigrationTest(MigrationVersion testMigrateVersion) {
+		this.testMigrateVersion = testMigrateVersion;
+	}
+
+	/**
+	 * Manually run this to write binary snapshot data.
+	 */
+	@Ignore
 	@Test
-	public void testRestoreFromFlink11WithEmptyState() throws Exception {
-		Properties testConfig = new Properties();
-		testConfig.setProperty(ConsumerConfigConstants.AWS_REGION, "us-east-1");
-		testConfig.setProperty(ConsumerConfigConstants.AWS_CREDENTIALS_PROVIDER, "BASIC");
-		testConfig.setProperty(ConsumerConfigConstants.AWS_ACCESS_KEY_ID, "accessKeyId");
-		testConfig.setProperty(ConsumerConfigConstants.AWS_SECRET_ACCESS_KEY, "secretKey");
+	public void writeSnapshot() throws Exception {
+		writeSnapshot("src/test/resources/kinesis-consumer-migration-test-flink" + flinkGenerateSavepointVersion + "-snapshot", TEST_STATE);
 
-		final DummyFlinkKafkaConsumer<String> consumerFunction = new DummyFlinkKafkaConsumer<>(testConfig);
+		// write empty state snapshot
+		writeSnapshot("src/test/resources/kinesis-consumer-migration-test-flink" + flinkGenerateSavepointVersion + "-empty-snapshot", new HashMap<>());
+	}
 
-		StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator = new StreamSource<>(consumerFunction);
+	@Test
+	public void testRestoreWithEmptyState() throws Exception {
+		final DummyFlinkKinesisConsumer<String> consumerFunction = new DummyFlinkKinesisConsumer<>(mock(KinesisDataFetcher.class));
+
+		StreamSource<String, DummyFlinkKinesisConsumer<String>> consumerOperator = new StreamSource<>(consumerFunction);
 
 		final AbstractStreamOperatorTestHarness<String> testHarness =
 			new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);
 
-		testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);
-
 		testHarness.setup();
-		// restore state from binary snapshot file using legacy method
-		testHarness.initializeStateFromLegacyCheckpoint(
-			getResourceFilename("kinesis-consumer-migration-test-flink1.1-empty-snapshot"));
+		MigrationTestUtil.restoreFromSnapshot(
+			testHarness,
+			"src/test/resources/kinesis-consumer-migration-test-flink" + testMigrateVersion + "-empty-snapshot", testMigrateVersion);
 		testHarness.open();
 
 		// assert that no state was restored
@@ -78,39 +125,25 @@ public class FlinkKinesisConsumerMigrationTest {
 	}
 
 	@Test
-	public void testRestoreFromFlink11() throws Exception {
-		Properties testConfig = new Properties();
-		testConfig.setProperty(ConsumerConfigConstants.AWS_REGION, "us-east-1");
-		testConfig.setProperty(ConsumerConfigConstants.AWS_CREDENTIALS_PROVIDER, "BASIC");
-		testConfig.setProperty(ConsumerConfigConstants.AWS_ACCESS_KEY_ID, "accessKeyId");
-		testConfig.setProperty(ConsumerConfigConstants.AWS_SECRET_ACCESS_KEY, "secretKey");
+	public void testRestore() throws Exception {
+		final DummyFlinkKinesisConsumer<String> consumerFunction = new DummyFlinkKinesisConsumer<>(mock(KinesisDataFetcher.class));
 
-		final DummyFlinkKafkaConsumer<String> consumerFunction = new DummyFlinkKafkaConsumer<>(testConfig);
-
-		StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator =
+		StreamSource<String, DummyFlinkKinesisConsumer<String>> consumerOperator =
 			new StreamSource<>(consumerFunction);
 
 		final AbstractStreamOperatorTestHarness<String> testHarness =
 			new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);
 
-		testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);
-
 		testHarness.setup();
-		// restore state from binary snapshot file using legacy method
-		testHarness.initializeStateFromLegacyCheckpoint(
-			getResourceFilename("kinesis-consumer-migration-test-flink1.1-snapshot"));
+		MigrationTestUtil.restoreFromSnapshot(
+			testHarness,
+			"src/test/resources/kinesis-consumer-migration-test-flink" + testMigrateVersion + "-snapshot", testMigrateVersion);
 		testHarness.open();
 
-		// the expected state in "kafka-consumer-migration-test-flink1.1-snapshot"
-		final HashMap<StreamShardMetadata, SequenceNumber> expectedState = new HashMap<>();
-		expectedState.put(KinesisStreamShard.convertToStreamShardMetadata(new KinesisStreamShard("fakeStream1",
-				new Shard().withShardId(KinesisShardIdGenerator.generateFromShardOrder(0)))),
-			new SequenceNumber("987654321"));
-
 		// assert that state is correctly restored from legacy checkpoint
 		assertNotEquals(null, consumerFunction.getRestoredState());
 		assertEquals(1, consumerFunction.getRestoredState().size());
-		assertEquals(expectedState, consumerFunction.getRestoredState());
+		assertEquals(TEST_STATE, consumerFunction.getRestoredState());
 
 		consumerOperator.close();
 		consumerOperator.cancel();
@@ -118,30 +151,87 @@ public class FlinkKinesisConsumerMigrationTest {
 
 	// ------------------------------------------------------------------------
 
-	private static String getResourceFilename(String filename) {
-		ClassLoader cl = FlinkKinesisConsumerMigrationTest.class.getClassLoader();
-		URL resource = cl.getResource(filename);
-		if (resource == null) {
-			throw new NullPointerException("Missing snapshot resource.");
+	@SuppressWarnings("unchecked")
+	private void writeSnapshot(String path, HashMap<StreamShardMetadata, SequenceNumber> state) throws Exception {
+		final OneShotLatch latch = new OneShotLatch();
+
+		final KinesisDataFetcher<String> fetcher = mock(KinesisDataFetcher.class);
+		doAnswer(new Answer() {
+			@Override
+			public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
+				latch.trigger();
+				return null;
+			}
+		}).when(fetcher).runFetcher();
+		when(fetcher.snapshotState()).thenReturn(state);
+
+		final DummyFlinkKinesisConsumer<String> consumer = new DummyFlinkKinesisConsumer<>(fetcher);
+
+		StreamSource<String, DummyFlinkKinesisConsumer<String>> consumerOperator = new StreamSource<>(consumer);
+
+		final AbstractStreamOperatorTestHarness<String> testHarness =
+				new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);
+
+		testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);
+
+		testHarness.setup();
+		testHarness.open();
+
+		final AtomicReference<Throwable> error = new AtomicReference<>();
+
+		// run the source asynchronously
+		Thread runner = new Thread() {
+			@Override
+			public void run() {
+				try {
+					consumer.run(mock(SourceFunction.SourceContext.class));
+				} catch (Throwable t) {
+					t.printStackTrace();
+					error.set(t);
+				}
+			}
+		};
+		runner.start();
+
+		if (!latch.isTriggered()) {
+			latch.await();
 		}
-		return resource.getFile();
+
+		final OperatorStateHandles snapshot;
+		synchronized (testHarness.getCheckpointLock()) {
+			snapshot = testHarness.snapshot(0L, 0L);
+		}
+
+		OperatorSnapshotUtil.writeStateHandle(snapshot, path);
+
+		consumerOperator.close();
+		runner.join();
 	}
 
-	private static class DummyFlinkKafkaConsumer<T> extends FlinkKinesisConsumer<T> {
-		private static final long serialVersionUID = 1L;
+	private static class DummyFlinkKinesisConsumer<T> extends FlinkKinesisConsumer<T> {
+
+		private KinesisDataFetcher<T> mockFetcher;
+
+		private static Properties dummyConfig = new Properties();
+		static {
+			dummyConfig.setProperty(AWSConfigConstants.AWS_REGION, "us-east-1");
+			dummyConfig.setProperty(AWSConfigConstants.AWS_ACCESS_KEY_ID, "accessKeyId");
+			dummyConfig.setProperty(AWSConfigConstants.AWS_SECRET_ACCESS_KEY, "secretKey");
+		}
 
-		@SuppressWarnings("unchecked")
-		DummyFlinkKafkaConsumer(Properties properties) {
-			super("test", mock(KinesisDeserializationSchema.class), properties);
+		DummyFlinkKinesisConsumer(KinesisDataFetcher<T> mockFetcher) {
+			super("dummy-topic", mock(KinesisDeserializationSchema.class), dummyConfig);
+			this.mockFetcher = mockFetcher;
 		}
 
 		@Override
-		protected KinesisDataFetcher<T> createFetcher(List<String> streams,
-													  	SourceFunction.SourceContext<T> sourceContext,
-													  	RuntimeContext runtimeContext,
-													  	Properties configProps,
-													  	KinesisDeserializationSchema<T> deserializationSchema) {
-			return mock(KinesisDataFetcher.class);
+		protected KinesisDataFetcher<T> createFetcher(
+				List<String> streams,
+				SourceContext<T> sourceContext,
+				RuntimeContext runtimeContext,
+				Properties configProps,
+				KinesisDeserializationSchema<T> deserializer) {
+			return mockFetcher;
 		}
 	}
 }

http://git-wip-us.apache.org/repos/asf/flink/blob/8dc5fd03/flink-connectors/flink-connector-kinesis/src/test/resources/kinesis-consumer-migration-test-flink1.2-empty-snapshot
----------------------------------------------------------------------
diff --git a/flink-connectors/flink-connector-kinesis/src/test/resources/kinesis-consumer-migration-test-flink1.2-empty-snapshot b/flink-connectors/flink-connector-kinesis/src/test/resources/kinesis-consumer-migration-test-flink1.2-empty-snapshot
new file mode 100644
index 0000000..3a55e41
Binary files /dev/null and b/flink-connectors/flink-connector-kinesis/src/test/resources/kinesis-consumer-migration-test-flink1.2-empty-snapshot differ

http://git-wip-us.apache.org/repos/asf/flink/blob/8dc5fd03/flink-connectors/flink-connector-kinesis/src/test/resources/kinesis-consumer-migration-test-flink1.2-snapshot
----------------------------------------------------------------------
diff --git a/flink-connectors/flink-connector-kinesis/src/test/resources/kinesis-consumer-migration-test-flink1.2-snapshot b/flink-connectors/flink-connector-kinesis/src/test/resources/kinesis-consumer-migration-test-flink1.2-snapshot
new file mode 100644
index 0000000..229bb2c
Binary files /dev/null and b/flink-connectors/flink-connector-kinesis/src/test/resources/kinesis-consumer-migration-test-flink1.2-snapshot differ

http://git-wip-us.apache.org/repos/asf/flink/blob/8dc5fd03/flink-connectors/flink-connector-kinesis/src/test/resources/kinesis-consumer-migration-test-flink1.3-empty-snapshot
----------------------------------------------------------------------
diff --git a/flink-connectors/flink-connector-kinesis/src/test/resources/kinesis-consumer-migration-test-flink1.3-empty-snapshot b/flink-connectors/flink-connector-kinesis/src/test/resources/kinesis-consumer-migration-test-flink1.3-empty-snapshot
new file mode 100644
index 0000000..aa981c0
Binary files /dev/null and b/flink-connectors/flink-connector-kinesis/src/test/resources/kinesis-consumer-migration-test-flink1.3-empty-snapshot differ

http://git-wip-us.apache.org/repos/asf/flink/blob/8dc5fd03/flink-connectors/flink-connector-kinesis/src/test/resources/kinesis-consumer-migration-test-flink1.3-snapshot
----------------------------------------------------------------------
diff --git a/flink-connectors/flink-connector-kinesis/src/test/resources/kinesis-consumer-migration-test-flink1.3-snapshot b/flink-connectors/flink-connector-kinesis/src/test/resources/kinesis-consumer-migration-test-flink1.3-snapshot
new file mode 100644
index 0000000..ddf8a4d
Binary files /dev/null and b/flink-connectors/flink-connector-kinesis/src/test/resources/kinesis-consumer-migration-test-flink1.3-snapshot differ


[2/2] flink git commit: [FLINK-7429] [kinesis] Unify empty state restore behaviour across 1.1 / 1.2 / 1.3

Posted by al...@apache.org.
[FLINK-7429] [kinesis] Unify empty state restore behaviour across 1.1 / 1.2 / 1.3

Prior to this commit, when restoring empty state from previous Flink
versions, the behaviour was different for each version. For older
versions, restoring empty state results in `null`. For newer versions,
restoring empty state results in an empty map.

We want an empty map to represent "this is a restored run, but there
was no state for us", and a null to represent "this is not a restored
run".


Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/6876abd3
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/6876abd3
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/6876abd3

Branch: refs/heads/release-1.3
Commit: 6876abd3730f205986c7df0a9084cc0f60284270
Parents: 8dc5fd0
Author: Tzu-Li (Gordon) Tai <tz...@apache.org>
Authored: Fri Aug 18 21:17:27 2017 +0800
Committer: Aljoscha Krettek <al...@gmail.com>
Committed: Thu Aug 24 13:25:08 2017 +0200

----------------------------------------------------------------------
 .../connectors/kinesis/FlinkKinesisConsumer.java     | 15 ++++++---------
 .../kinesis/FlinkKinesisConsumerMigrationTest.java   |  9 ++++++---
 2 files changed, 12 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/6876abd3/flink-connectors/flink-connector-kinesis/src/main/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumer.java
----------------------------------------------------------------------
diff --git a/flink-connectors/flink-connector-kinesis/src/main/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumer.java b/flink-connectors/flink-connector-kinesis/src/main/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumer.java
index ea76ccc..aad622e 100644
--- a/flink-connectors/flink-connector-kinesis/src/main/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumer.java
+++ b/flink-connectors/flink-connector-kinesis/src/main/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumer.java
@@ -366,15 +366,12 @@ public class FlinkKinesisConsumer<T> extends RichParallelSourceFunction<T> imple
 		LOG.info("Subtask {} restoring offsets from an older Flink version: {}",
 			getRuntimeContext().getIndexOfThisSubtask(), sequenceNumsToRestore);
 
-		if (restoredState.isEmpty()) {
-			sequenceNumsToRestore = null;
-		} else {
-			sequenceNumsToRestore = new HashMap<>();
-			for (Map.Entry<KinesisStreamShard, SequenceNumber> stateEntry : restoredState.entrySet()) {
-				sequenceNumsToRestore.put(
-						KinesisStreamShard.convertToStreamShardMetadata(stateEntry.getKey()),
-						stateEntry.getValue());
-			}
+		sequenceNumsToRestore = new HashMap<>();
+
+		for (Map.Entry<KinesisStreamShard, SequenceNumber> stateEntry : restoredState.entrySet()) {
+			sequenceNumsToRestore.put(
+				KinesisStreamShard.convertToStreamShardMetadata(stateEntry.getKey()),
+				stateEntry.getValue());
 		}
 	}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/6876abd3/flink-connectors/flink-connector-kinesis/src/test/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumerMigrationTest.java
----------------------------------------------------------------------
diff --git a/flink-connectors/flink-connector-kinesis/src/test/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumerMigrationTest.java b/flink-connectors/flink-connector-kinesis/src/test/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumerMigrationTest.java
index 3e43f13..a55a5c0 100644
--- a/flink-connectors/flink-connector-kinesis/src/test/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumerMigrationTest.java
+++ b/flink-connectors/flink-connector-kinesis/src/test/java/org/apache/flink/streaming/connectors/kinesis/FlinkKinesisConsumerMigrationTest.java
@@ -50,6 +50,7 @@ import java.util.concurrent.atomic.AtomicReference;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertTrue;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -70,7 +71,7 @@ public class FlinkKinesisConsumerMigrationTest {
 	 */
 	private final MigrationVersion flinkGenerateSavepointVersion = null;
 
-	private final static HashMap<StreamShardMetadata, SequenceNumber> TEST_STATE = new HashMap<>();
+	private static final HashMap<StreamShardMetadata, SequenceNumber> TEST_STATE = new HashMap<>();
 	static {
 		StreamShardMetadata shardMetadata = new StreamShardMetadata();
 		shardMetadata.setStreamName("fakeStream1");
@@ -99,7 +100,9 @@ public class FlinkKinesisConsumerMigrationTest {
 		writeSnapshot("src/test/resources/kinesis-consumer-migration-test-flink" + flinkGenerateSavepointVersion + "-snapshot", TEST_STATE);
 
 		// write empty state snapshot
-		writeSnapshot("src/test/resources/kinesis-consumer-migration-test-flink" + flinkGenerateSavepointVersion + "-empty-snapshot", new HashMap<>());
+		writeSnapshot(
+			"src/test/resources/kinesis-consumer-migration-test-flink" + flinkGenerateSavepointVersion + "-empty-snapshot",
+			new HashMap<StreamShardMetadata, SequenceNumber>());
 	}
 
 	@Test
@@ -118,7 +121,7 @@ public class FlinkKinesisConsumerMigrationTest {
 		testHarness.open();
 
 		// assert that no state was restored
-		assertEquals(null, consumerFunction.getRestoredState());
+		assertTrue(consumerFunction.getRestoredState().isEmpty());
 
 		consumerOperator.close();
 		consumerOperator.cancel();