Posted to commits@flink.apache.org by ch...@apache.org on 2019/08/28 11:16:49 UTC

[flink] branch java11_test created (now 423772b)

This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a change to branch java11_test
in repository https://gitbox.apache.org/repos/asf/flink.git.


      at 423772b  asm

This branch includes the following new commits:

     new 4e16897  [FLINK-13457][travis] Setup JDK 11 builds
     new 6e27f98  [TMP] run jdk 9 tests by default
     new b5fedec  [TMP][FLINK-13467][build] Bump ASM to 7.1
     new 6342088  [TMP] setup custom flink-shaded install
     new cde5e98  harden StreamTaskTest
     new 4730872  [FLINK-XXXXX][tests] Disable failing tests on Java 11
     new 4512716f add misc/heavy e2e test runs
     new 93e3433  include hadoop
     new 38a6abf  +fs
     new 84392ad  disable modern kafka e2e
     new 423772b  asm

The 11 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.



[flink] 07/11: add misc/heavy e2e test runs

Posted by ch...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch java11_test
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 4512716f6c18328a6bebf601cc3bb0cb5dc2abb1
Author: Chesnay Schepler <ch...@apache.org>
AuthorDate: Wed Aug 21 14:24:16 2019 +0200

    add misc/heavy e2e test runs
---
 .travis.yml | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/.travis.yml b/.travis.yml
index 57a4f01..77fd90d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -406,7 +406,11 @@ jobs:
     - if: type in (pull_request, push)
       stage: test
       jdk: "openjdk11"
-      env: PROFILE="-De2e-metrics -Dinclude-kinesis -Djdk11"
+      env: PROFILE="-Dinclude-kinesis -Djdk11 -De2e-metrics"
+      script: ./tools/travis/nightly.sh split_misc_hadoopfree.sh
+      name: misc - jdk11
+    - if: type in (pull_request, push)
+      env: PROFILE="-Dinclude-kinesis -Djdk11"
       script: ./tools/travis/nightly.sh split_ha.sh
       name: ha - jdk11
     - if: type in (pull_request, push)
@@ -417,3 +421,7 @@ jobs:
       env: PROFILE="-Dinclude-kinesis -Djdk11"
       script: ./tools/travis/nightly.sh split_checkpoints.sh
       name: checkpoints - jdk 11
+    - if: type in (pull_request, push)
+      env: PROFILE="-Dinclude-kinesis -Djdk11"
+      script: ./tools/travis/nightly.sh split_heavy.sh
+      name: heavy - jdk 11


[flink] 06/11: [FLINK-XXXXX][tests] Disable failing tests on Java 11

Posted by ch...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch java11_test
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 47308721c856870c22ebd994e22d392332599ca1
Author: Chesnay Schepler <ch...@apache.org>
AuthorDate: Mon Jul 29 21:00:28 2019 +0200

    [FLINK-XXXXX][tests] Disable failing tests on Java 11
---
 .../cassandra/CassandraConnectorITCase.java           |  3 +++
 .../connectors/kafka/KafkaShortRetention08ITCase.java |  4 ++++
 .../connectors/kafka/Kafka09SecuredRunITCase.java     |  3 +++
 flink-end-to-end-tests/run-pre-commit-tests.sh        |  4 ++--
 .../apache/flink/testutils/junit/FailsOnJava11.java   | 19 +++----------------
 .../flink/test/classloading/ClassLoaderITCase.java    |  3 +++
 .../flink/yarn/YARNSessionFIFOSecuredITCase.java      |  3 +++
 pom.xml                                               |  7 +++++++
 8 files changed, 28 insertions(+), 18 deletions(-)

diff --git a/flink-connectors/flink-connector-cassandra/src/test/java/org/apache/flink/streaming/connectors/cassandra/CassandraConnectorITCase.java b/flink-connectors/flink-connector-cassandra/src/test/java/org/apache/flink/streaming/connectors/cassandra/CassandraConnectorITCase.java
index 0119c16..5aa282e 100644
--- a/flink-connectors/flink-connector-cassandra/src/test/java/org/apache/flink/streaming/connectors/cassandra/CassandraConnectorITCase.java
+++ b/flink-connectors/flink-connector-cassandra/src/test/java/org/apache/flink/streaming/connectors/cassandra/CassandraConnectorITCase.java
@@ -45,6 +45,7 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.flink.streaming.api.functions.sink.SinkContextUtil;
 import org.apache.flink.streaming.runtime.operators.WriteAheadSinkTestBase;
 import org.apache.flink.table.api.java.StreamTableEnvironment;
+import org.apache.flink.testutils.junit.FailsOnJava11;
 import org.apache.flink.types.Row;
 
 import com.datastax.driver.core.Cluster;
@@ -60,6 +61,7 @@ import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 import org.junit.rules.TemporaryFolder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -90,6 +92,7 @@ import static org.junit.Assert.assertTrue;
  * IT cases for all cassandra sinks.
  */
 @SuppressWarnings("serial")
+@Category(FailsOnJava11.class)
 public class CassandraConnectorITCase extends WriteAheadSinkTestBase<Tuple3<String, Integer, Integer>, CassandraTupleWriteAheadSink<Tuple3<String, Integer, Integer>>> {
 
 	private static final Logger LOG = LoggerFactory.getLogger(CassandraConnectorITCase.class);
diff --git a/flink-connectors/flink-connector-kafka-0.8/src/test/java/org/apache/flink/streaming/connectors/kafka/KafkaShortRetention08ITCase.java b/flink-connectors/flink-connector-kafka-0.8/src/test/java/org/apache/flink/streaming/connectors/kafka/KafkaShortRetention08ITCase.java
index 091fae3..0f164d6 100644
--- a/flink-connectors/flink-connector-kafka-0.8/src/test/java/org/apache/flink/streaming/connectors/kafka/KafkaShortRetention08ITCase.java
+++ b/flink-connectors/flink-connector-kafka-0.8/src/test/java/org/apache/flink/streaming/connectors/kafka/KafkaShortRetention08ITCase.java
@@ -17,7 +17,10 @@
 
 package org.apache.flink.streaming.connectors.kafka;
 
+import org.apache.flink.testutils.junit.FailsOnJava11;
+
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
 /**
  * {@link KafkaShortRetentionTestBase} for Kafka 0.8 .
@@ -26,6 +29,7 @@ import org.junit.Test;
 public class KafkaShortRetention08ITCase extends KafkaShortRetentionTestBase {
 
 	@Test(timeout = 60000)
+	@Category(FailsOnJava11.class)
 	public void testAutoOffsetReset() throws Exception {
 		runAutoOffsetResetTest();
 	}
diff --git a/flink-connectors/flink-connector-kafka-0.9/src/test/java/org/apache/flink/streaming/connectors/kafka/Kafka09SecuredRunITCase.java b/flink-connectors/flink-connector-kafka-0.9/src/test/java/org/apache/flink/streaming/connectors/kafka/Kafka09SecuredRunITCase.java
index f8ce2f5..91b0565 100644
--- a/flink-connectors/flink-connector-kafka-0.9/src/test/java/org/apache/flink/streaming/connectors/kafka/Kafka09SecuredRunITCase.java
+++ b/flink-connectors/flink-connector-kafka-0.9/src/test/java/org/apache/flink/streaming/connectors/kafka/Kafka09SecuredRunITCase.java
@@ -18,16 +18,19 @@
 package org.apache.flink.streaming.connectors.kafka;
 
 import org.apache.flink.test.util.SecureTestEnvironment;
+import org.apache.flink.testutils.junit.FailsOnJava11;
 
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
  * Kafka Secure Connection (kerberos) IT test case.
  */
+@Category(FailsOnJava11.class)
 public class Kafka09SecuredRunITCase extends KafkaConsumerTestBase {
 
 	protected static final Logger LOG = LoggerFactory.getLogger(Kafka09SecuredRunITCase.class);
diff --git a/flink-end-to-end-tests/run-pre-commit-tests.sh b/flink-end-to-end-tests/run-pre-commit-tests.sh
index f72d20c..c32f6d6 100755
--- a/flink-end-to-end-tests/run-pre-commit-tests.sh
+++ b/flink-end-to-end-tests/run-pre-commit-tests.sh
@@ -54,8 +54,8 @@ run_test "Wordcount end-to-end test" "$END_TO_END_DIR/test-scripts/test_batch_wo
 run_test "Shaded Hadoop S3A end-to-end test" "$END_TO_END_DIR/test-scripts/test_batch_wordcount.sh hadoop"
 run_test "Shaded Presto S3 end-to-end test" "$END_TO_END_DIR/test-scripts/test_batch_wordcount.sh presto"
 run_test "Custom FS plugin end-to-end test" "$END_TO_END_DIR/test-scripts/test_batch_wordcount.sh dummy-fs"
-run_test "Kafka 0.10 end-to-end test" "$END_TO_END_DIR/test-scripts/test_streaming_kafka010.sh"
-run_test "Kafka 0.11 end-to-end test" "$END_TO_END_DIR/test-scripts/test_streaming_kafka011.sh"
+#run_test "Kafka 0.10 end-to-end test" "$END_TO_END_DIR/test-scripts/test_streaming_kafka010.sh"
+#run_test "Kafka 0.11 end-to-end test" "$END_TO_END_DIR/test-scripts/test_streaming_kafka011.sh"
 run_test "Modern Kafka end-to-end test" "$END_TO_END_DIR/test-scripts/test_streaming_kafka.sh"
 run_test "Kinesis end-to-end test" "$END_TO_END_DIR/test-scripts/test_streaming_kinesis.sh"
 run_test "class loading end-to-end test" "$END_TO_END_DIR/test-scripts/test_streaming_classloader.sh"
diff --git a/flink-connectors/flink-connector-kafka-0.8/src/test/java/org/apache/flink/streaming/connectors/kafka/KafkaShortRetention08ITCase.java b/flink-test-utils-parent/flink-test-utils-junit/src/main/java/org/apache/flink/testutils/junit/FailsOnJava11.java
similarity index 63%
copy from flink-connectors/flink-connector-kafka-0.8/src/test/java/org/apache/flink/streaming/connectors/kafka/KafkaShortRetention08ITCase.java
copy to flink-test-utils-parent/flink-test-utils-junit/src/main/java/org/apache/flink/testutils/junit/FailsOnJava11.java
index 091fae3..616f1bf 100644
--- a/flink-connectors/flink-connector-kafka-0.8/src/test/java/org/apache/flink/streaming/connectors/kafka/KafkaShortRetention08ITCase.java
+++ b/flink-test-utils-parent/flink-test-utils-junit/src/main/java/org/apache/flink/testutils/junit/FailsOnJava11.java
@@ -15,23 +15,10 @@
  * limitations under the License.
  */
 
-package org.apache.flink.streaming.connectors.kafka;
-
-import org.junit.Test;
+package org.apache.flink.testutils.junit;
 
 /**
- * {@link KafkaShortRetentionTestBase} for Kafka 0.8 .
+ * Marker interface for tests that fail on Java 11.
  */
-@SuppressWarnings("serial")
-public class KafkaShortRetention08ITCase extends KafkaShortRetentionTestBase {
-
-	@Test(timeout = 60000)
-	public void testAutoOffsetReset() throws Exception {
-		runAutoOffsetResetTest();
-	}
-
-	@Test(timeout = 60000)
-	public void testAutoOffsetResetNone() throws Exception {
-		runFailOnAutoOffsetResetNoneEager();
-	}
+public interface FailsOnJava11 {
 }
diff --git a/flink-tests/src/test/java/org/apache/flink/test/classloading/ClassLoaderITCase.java b/flink-tests/src/test/java/org/apache/flink/test/classloading/ClassLoaderITCase.java
index 23503d7..cd421f3 100644
--- a/flink-tests/src/test/java/org/apache/flink/test/classloading/ClassLoaderITCase.java
+++ b/flink-tests/src/test/java/org/apache/flink/test/classloading/ClassLoaderITCase.java
@@ -36,6 +36,7 @@ import org.apache.flink.streaming.util.TestStreamEnvironment;
 import org.apache.flink.test.testdata.KMeansData;
 import org.apache.flink.test.util.SuccessException;
 import org.apache.flink.test.util.TestEnvironment;
+import org.apache.flink.testutils.junit.FailsOnJava11;
 import org.apache.flink.util.ExceptionUtils;
 import org.apache.flink.util.TestLogger;
 
@@ -47,6 +48,7 @@ import org.junit.BeforeClass;
 import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 import org.junit.rules.ExpectedException;
 import org.junit.rules.TemporaryFolder;
 import org.slf4j.Logger;
@@ -197,6 +199,7 @@ public class ClassLoaderITCase extends TestLogger {
 	}
 
 	@Test
+	@Category(FailsOnJava11.class)
 	public void testCheckpointedStreamingClassloaderJobWithCustomClassLoader() throws IOException, ProgramInvocationException {
 		// checkpointed streaming job with custom classes for the checkpoint (FLINK-2543)
 		// the test also ensures that user specific exceptions are serializable between JobManager <--> JobClient.
diff --git a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionFIFOSecuredITCase.java b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionFIFOSecuredITCase.java
index 3faf777..de15e49 100644
--- a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionFIFOSecuredITCase.java
+++ b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionFIFOSecuredITCase.java
@@ -25,6 +25,7 @@ import org.apache.flink.runtime.security.SecurityUtils;
 import org.apache.flink.runtime.security.modules.HadoopModule;
 import org.apache.flink.test.util.SecureTestEnvironment;
 import org.apache.flink.test.util.TestingSecurityContext;
+import org.apache.flink.testutils.junit.FailsOnJava11;
 
 import org.apache.flink.shaded.guava18.com.google.common.collect.Lists;
 
@@ -37,6 +38,7 @@ import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -106,6 +108,7 @@ public class YARNSessionFIFOSecuredITCase extends YARNSessionFIFOITCase {
 
 	@Test(timeout = 60000) // timeout after a minute.
 	@Override
+	@Category(FailsOnJava11.class)
 	public void testDetachedMode() throws Exception {
 		runTest(() -> {
 			runDetachedModeTest();
diff --git a/pom.xml b/pom.xml
index 0d3282f..006c376 100644
--- a/pom.xml
+++ b/pom.xml
@@ -859,6 +859,13 @@ under the License.
 								</dependency>
 							</dependencies>
 						</plugin>
+						<plugin>
+							<groupId>org.apache.maven.plugins</groupId>
+							<artifactId>maven-surefire-plugin</artifactId>
+							<configuration>
+								<excludedGroups>org.apache.flink.testutils.junit.FailsOnJava11</excludedGroups>
+							</configuration>
+						</plugin>
 					</plugins>
 				</pluginManagement>
 
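For readers tracing the mechanism in this commit: it combines plain JUnit 4 categories with Surefire's excludedGroups. A test class, or a single test method, is tagged with the FailsOnJava11 marker interface, and the <excludedGroups> entry added to pom.xml above skips everything so tagged while that plugin configuration is active. A minimal sketch, assuming JUnit 4 and the flink-test-utils-junit dependency on the test classpath; the test class itself is hypothetical:

    import org.apache.flink.testutils.junit.FailsOnJava11;

    import org.junit.Test;
    import org.junit.experimental.categories.Category;

    import static org.junit.Assert.assertNotNull;

    /**
     * Hypothetical test class, not part of the commit. With Surefire configured
     * as above (excludedGroups=org.apache.flink.testutils.junit.FailsOnJava11),
     * the whole class is skipped; the annotation works the same way on a single
     * test method, as in KafkaShortRetention08ITCase.
     */
    @Category(FailsOnJava11.class)
    public class SomeJava11SensitiveITCase {

        @Test
        public void testSomethingThatBreaksOnJava11() {
            // would exercise the Java-11-incompatible code path here
            assertNotNull(System.getProperty("java.version"));
        }
    }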


[flink] 10/11: disable modern kafka e2e

Posted by ch...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch java11_test
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 84392ad2210a8a0083f637a7930b0b8fc9a6e093
Author: Chesnay Schepler <ch...@apache.org>
AuthorDate: Wed Aug 28 10:28:50 2019 +0200

    disable modern kafka e2e
---
 flink-end-to-end-tests/run-pre-commit-tests.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/flink-end-to-end-tests/run-pre-commit-tests.sh b/flink-end-to-end-tests/run-pre-commit-tests.sh
index c32f6d6..999b7db 100755
--- a/flink-end-to-end-tests/run-pre-commit-tests.sh
+++ b/flink-end-to-end-tests/run-pre-commit-tests.sh
@@ -56,7 +56,7 @@ run_test "Shaded Presto S3 end-to-end test" "$END_TO_END_DIR/test-scripts/test_b
 run_test "Custom FS plugin end-to-end test" "$END_TO_END_DIR/test-scripts/test_batch_wordcount.sh dummy-fs"
 #run_test "Kafka 0.10 end-to-end test" "$END_TO_END_DIR/test-scripts/test_streaming_kafka010.sh"
 #run_test "Kafka 0.11 end-to-end test" "$END_TO_END_DIR/test-scripts/test_streaming_kafka011.sh"
-run_test "Modern Kafka end-to-end test" "$END_TO_END_DIR/test-scripts/test_streaming_kafka.sh"
+#run_test "Modern Kafka end-to-end test" "$END_TO_END_DIR/test-scripts/test_streaming_kafka.sh"
 run_test "Kinesis end-to-end test" "$END_TO_END_DIR/test-scripts/test_streaming_kinesis.sh"
 run_test "class loading end-to-end test" "$END_TO_END_DIR/test-scripts/test_streaming_classloader.sh"
 run_test "Distributed cache end-to-end test" "$END_TO_END_DIR/test-scripts/test_streaming_distributed_cache_via_blob.sh"


[flink] 03/11: [TMP][FLINK-13467][build] Bump ASM to 7.1

Posted by ch...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch java11_test
in repository https://gitbox.apache.org/repos/asf/flink.git

commit b5fedec8e3a39a289c2890e891f534b4f3a85c13
Author: Chesnay Schepler <ch...@apache.org>
AuthorDate: Mon Jul 29 12:26:05 2019 +0200

    [TMP][FLINK-13467][build] Bump ASM to 7.1
---
 flink-core/pom.xml                                           |  2 +-
 .../apache/flink/api/java/typeutils/TypeExtractionUtils.java |  4 ++--
 flink-java/pom.xml                                           |  2 +-
 .../main/java/org/apache/flink/api/java/ClosureCleaner.java  | 12 ++++++------
 flink-runtime/pom.xml                                        |  2 +-
 flink-scala/pom.xml                                          |  2 +-
 .../scala/org/apache/flink/api/scala/ClosureCleaner.scala    |  4 ++--
 pom.xml                                                      |  4 ++--
 8 files changed, 16 insertions(+), 16 deletions(-)

diff --git a/flink-core/pom.xml b/flink-core/pom.xml
index 0564f98..3fadf37 100644
--- a/flink-core/pom.xml
+++ b/flink-core/pom.xml
@@ -49,7 +49,7 @@ under the License.
 
 		<dependency>
 			<groupId>org.apache.flink</groupId>
-			<artifactId>flink-shaded-asm-6</artifactId>
+			<artifactId>flink-shaded-asm-7</artifactId>
 		</dependency>
 
 		<!-- standard utilities -->
diff --git a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/TypeExtractionUtils.java b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/TypeExtractionUtils.java
index a49e10a..eef309e 100644
--- a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/TypeExtractionUtils.java
+++ b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/TypeExtractionUtils.java
@@ -35,8 +35,8 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
-import static org.apache.flink.shaded.asm6.org.objectweb.asm.Type.getConstructorDescriptor;
-import static org.apache.flink.shaded.asm6.org.objectweb.asm.Type.getMethodDescriptor;
+import static org.apache.flink.shaded.asm7.org.objectweb.asm.Type.getConstructorDescriptor;
+import static org.apache.flink.shaded.asm7.org.objectweb.asm.Type.getMethodDescriptor;
 
 @Internal
 public class TypeExtractionUtils {
diff --git a/flink-java/pom.xml b/flink-java/pom.xml
index c1abac0..8634403 100644
--- a/flink-java/pom.xml
+++ b/flink-java/pom.xml
@@ -43,7 +43,7 @@ under the License.
 
 		<dependency>
 			<groupId>org.apache.flink</groupId>
-			<artifactId>flink-shaded-asm-6</artifactId>
+			<artifactId>flink-shaded-asm-7</artifactId>
 		</dependency>
 
 		<dependency>
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/ClosureCleaner.java b/flink-java/src/main/java/org/apache/flink/api/java/ClosureCleaner.java
index b47479f..3e2a6e1 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/ClosureCleaner.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/ClosureCleaner.java
@@ -23,10 +23,10 @@ import org.apache.flink.api.common.ExecutionConfig;
 import org.apache.flink.api.common.InvalidProgramException;
 import org.apache.flink.util.InstantiationUtil;
 
-import org.apache.flink.shaded.asm6.org.objectweb.asm.ClassReader;
-import org.apache.flink.shaded.asm6.org.objectweb.asm.ClassVisitor;
-import org.apache.flink.shaded.asm6.org.objectweb.asm.MethodVisitor;
-import org.apache.flink.shaded.asm6.org.objectweb.asm.Opcodes;
+import org.apache.flink.shaded.asm7.org.objectweb.asm.ClassReader;
+import org.apache.flink.shaded.asm7.org.objectweb.asm.ClassVisitor;
+import org.apache.flink.shaded.asm7.org.objectweb.asm.MethodVisitor;
+import org.apache.flink.shaded.asm7.org.objectweb.asm.Opcodes;
 
 import org.apache.commons.lang3.ClassUtils;
 import org.slf4j.Logger;
@@ -248,7 +248,7 @@ class This0AccessFinder extends ClassVisitor {
 	private boolean isThis0Accessed;
 
 	public This0AccessFinder(String this0Name) {
-		super(Opcodes.ASM6);
+		super(Opcodes.ASM7);
 		this.this0Name = this0Name;
 	}
 
@@ -258,7 +258,7 @@ class This0AccessFinder extends ClassVisitor {
 
 	@Override
 	public MethodVisitor visitMethod(int access, String name, String desc, String sig, String[] exceptions) {
-		return new MethodVisitor(Opcodes.ASM6) {
+		return new MethodVisitor(Opcodes.ASM7) {
 
 			@Override
 			public void visitFieldInsn(int op, String owner, String name, String desc) {
diff --git a/flink-runtime/pom.xml b/flink-runtime/pom.xml
index d70e244..c546830 100644
--- a/flink-runtime/pom.xml
+++ b/flink-runtime/pom.xml
@@ -90,7 +90,7 @@ under the License.
 
 		<dependency>
 			<groupId>org.apache.flink</groupId>
-			<artifactId>flink-shaded-asm-6</artifactId>
+			<artifactId>flink-shaded-asm-7</artifactId>
 		</dependency>
 
 		<dependency>
diff --git a/flink-scala/pom.xml b/flink-scala/pom.xml
index 8a6a077..828ac10 100644
--- a/flink-scala/pom.xml
+++ b/flink-scala/pom.xml
@@ -47,7 +47,7 @@ under the License.
 
 		<dependency>
 			<groupId>org.apache.flink</groupId>
-			<artifactId>flink-shaded-asm-6</artifactId>
+			<artifactId>flink-shaded-asm-7</artifactId>
 		</dependency>
 
 		<dependency>
diff --git a/flink-scala/src/main/scala/org/apache/flink/api/scala/ClosureCleaner.scala b/flink-scala/src/main/scala/org/apache/flink/api/scala/ClosureCleaner.scala
index 2cbe9fc..fbc2c1e 100644
--- a/flink-scala/src/main/scala/org/apache/flink/api/scala/ClosureCleaner.scala
+++ b/flink-scala/src/main/scala/org/apache/flink/api/scala/ClosureCleaner.scala
@@ -28,8 +28,8 @@ import org.slf4j.LoggerFactory
 
 import scala.collection.mutable.Map
 import scala.collection.mutable.Set
-import org.apache.flink.shaded.asm6.org.objectweb.asm.{ClassReader, ClassVisitor, MethodVisitor, Type}
-import org.apache.flink.shaded.asm6.org.objectweb.asm.Opcodes._
+import org.apache.flink.shaded.asm7.org.objectweb.asm.{ClassReader, ClassVisitor, MethodVisitor, Type}
+import org.apache.flink.shaded.asm7.org.objectweb.asm.Opcodes._
 
 import scala.collection.mutable
 
diff --git a/pom.xml b/pom.xml
index ef17b18..0d3282f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -252,8 +252,8 @@ under the License.
 
 			<dependency>
 				<groupId>org.apache.flink</groupId>
-				<artifactId>flink-shaded-asm-6</artifactId>
-				<version>6.2.1-${flink.shaded.version}</version>
+				<artifactId>flink-shaded-asm-7</artifactId>
+				<version>7.1-8.0</version>
 			</dependency>
 
 			<dependency>
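
A side note on the mechanics of this bump: ASM visitors pass an API-level constant to their super constructor, so an ASM upgrade means changing both the dependency coordinates and every Opcodes.ASMx reference in visitor code, which is exactly what the ClosureCleaner hunks above do. A minimal sketch of the pattern against the shaded ASM 7 packages; the visitor itself is illustrative and not part of the commit:

    import org.apache.flink.shaded.asm7.org.objectweb.asm.ClassVisitor;
    import org.apache.flink.shaded.asm7.org.objectweb.asm.MethodVisitor;
    import org.apache.flink.shaded.asm7.org.objectweb.asm.Opcodes;

    /**
     * Illustrative visitor, not part of the commit. The api constant handed to
     * super() must keep pace with the ASM version on the classpath; a stale
     * constant can make ASM reject class files that use newer features.
     */
    class MethodNamePrinter extends ClassVisitor {

        MethodNamePrinter() {
            super(Opcodes.ASM7); // bumped from ASM6 together with flink-shaded-asm-7
        }

        @Override
        public MethodVisitor visitMethod(int access, String name, String desc, String sig, String[] exceptions) {
            System.out.println("visiting method: " + name + desc);
            return new MethodVisitor(Opcodes.ASM7) {}; // no-op method visitor
        }
    }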


[flink] 08/11: include hadoop

Posted by ch...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch java11_test
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 93e3433c43331e63e10f7a9f53f83ff6ef446f97
Author: Chesnay Schepler <ch...@apache.org>
AuthorDate: Wed Aug 21 14:26:26 2019 +0200

    include hadoop
---
 .travis.yml | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index 77fd90d..4f87494 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -406,22 +406,22 @@ jobs:
     - if: type in (pull_request, push)
       stage: test
       jdk: "openjdk11"
-      env: PROFILE="-Dinclude-kinesis -Djdk11 -De2e-metrics"
-      script: ./tools/travis/nightly.sh split_misc_hadoopfree.sh
+      env: PROFILE="-Dinclude-kinesis -Djdk11 -Dinclude-hadoop -Dhadoop.version=2.8.3 -De2e-metrics"
+      script: ./tools/travis/nightly.sh split_misc.sh
       name: misc - jdk11
     - if: type in (pull_request, push)
-      env: PROFILE="-Dinclude-kinesis -Djdk11"
+      env: PROFILE="-Dinclude-kinesis -Djdk11 -Dinclude-hadoop -Dhadoop.version=2.8.3"
       script: ./tools/travis/nightly.sh split_ha.sh
       name: ha - jdk11
     - if: type in (pull_request, push)
-      env: PROFILE="-Dinclude-kinesis -Djdk11"
+      env: PROFILE="-Dinclude-kinesis -Djdk11 -Dinclude-hadoop -Dhadoop.version=2.8.3"
       script: ./tools/travis/nightly.sh split_sticky.sh
       name: sticky - jdk 11
     - if: type in (pull_request, push)
-      env: PROFILE="-Dinclude-kinesis -Djdk11"
+      env: PROFILE="-Dinclude-kinesis -Djdk11 -Dinclude-hadoop -Dhadoop.version=2.8.3"
       script: ./tools/travis/nightly.sh split_checkpoints.sh
       name: checkpoints - jdk 11
     - if: type in (pull_request, push)
-      env: PROFILE="-Dinclude-kinesis -Djdk11"
+      env: PROFILE="-Dinclude-kinesis -Djdk11 -Dinclude-hadoop -Dhadoop.version=2.8.3"
       script: ./tools/travis/nightly.sh split_heavy.sh
       name: heavy - jdk 11


[flink] 09/11: +fs

Posted by ch...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch java11_test
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 38a6abfde63412e85979d38c7499368b2cd17995
Author: Chesnay Schepler <ch...@apache.org>
AuthorDate: Wed Aug 28 10:28:34 2019 +0200

    +fs
---
 tools/travis/nightly.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tools/travis/nightly.sh b/tools/travis/nightly.sh
index 44154f8..6fbeda7 100755
--- a/tools/travis/nightly.sh
+++ b/tools/travis/nightly.sh
@@ -30,7 +30,7 @@ SCRIPT=$1
 source ${HERE}/setup_docker.sh
 source ${HERE}/setup_kubernetes.sh
 
-git clone https://github.com/zentol/flink-shaded.git --branch master --single-branch
+git clone https://github.com/apache/flink-shaded.git --branch master --single-branch
 cd flink-shaded
 mvn clean install
 cd ..


[flink] 02/11: [TMP] run jdk 9 tests by default

Posted by ch...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch java11_test
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 6e27f98dfa200a411b936677c6d0e402bc5813a0
Author: Chesnay Schepler <ch...@apache.org>
AuthorDate: Mon Jul 29 08:58:45 2019 +0200

    [TMP] run jdk 9 tests by default
---
 .travis.yml | 46 +++++++++++++++++++++++-----------------------
 1 file changed, 23 insertions(+), 23 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index c1869fd..57a4f01 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -73,45 +73,45 @@ jdk: "openjdk8"
 jobs:
   include:
     # main profile
-    - if: type in (pull_request, push)
+    - if: type = cron
       stage: compile
       script: ./tools/travis_controller.sh compile
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11"
       name: compile
-    - if: type in (pull_request, push)
+    - if: type = cron
       stage: test
       script: ./tools/travis_controller.sh core
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11"
       name: core
-    - if: type in (pull_request, push)
+    - if: type = cron
       script: ./tools/travis_controller.sh python
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11"
       name: python
-    - if: type in (pull_request, push)
+    - if: type = cron
       script: ./tools/travis_controller.sh libraries
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11"
       name: libraries
-    - if: type in (pull_request, push)
+    - if: type = cron
       script: ./tools/travis_controller.sh blink_planner
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11"
       name: blink_planner
-    - if: type in (pull_request, push)
+    - if: type = cron
       script: ./tools/travis_controller.sh connectors
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11"
       name: connectors
-    - if: type in (pull_request, push)
+    - if: type = cron
       script: ./tools/travis_controller.sh kafka/gelly
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11"
       name: kafka/gelly
-    - if: type in (pull_request, push)
+    - if: type = cron
       script: ./tools/travis_controller.sh tests
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11"
       name: tests
-    - if: type in (pull_request, push)
+    - if: type = cron
       script: ./tools/travis_controller.sh misc
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11"
       name: misc
-    - if: type in (pull_request, push)
+    - if: type = cron
       stage: cleanup
       script: ./tools/travis_controller.sh cleanup
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11"
@@ -251,54 +251,54 @@ jobs:
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
       name: cleanup - jdk 9
       # JDK11 profile
-    - if: type = cron
+    - if: type in (pull_request, push)
       jdk: "openjdk11"
       stage: compile
       script: ./tools/travis_controller.sh compile
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
       name: compile - jdk 11
-    - if: type = cron
+    - if: type in (pull_request, push)
       jdk: "openjdk11"
       stage: test
       script: ./tools/travis_controller.sh core
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
       name: core - jdk 11
-    - if: type = cron
+    - if: type in (pull_request, push)
       jdk: "openjdk11"
       script: ./tools/travis_controller.sh python
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
       name: python - jdk 11
-    - if: type = cron
+    - if: type in (pull_request, push)
       jdk: "openjdk11"
       script: ./tools/travis_controller.sh libraries
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
       name: libraries - jdk 11
-    - if: type = cron
+    - if: type in (pull_request, push)
       jdk: "openjdk11"
       script: ./tools/travis_controller.sh blink_planner
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
       name: blink_planner - jdk 11
-    - if: type = cron
+    - if: type in (pull_request, push)
       jdk: "openjdk11"
       script: ./tools/travis_controller.sh connectors
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
       name: connectors - jdk 11
-    - if: type = cron
+    - if: type in (pull_request, push)
       jdk: "openjdk11"
       script: ./tools/travis_controller.sh kafka/gelly
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
       name: kafka/gelly - jdk 11
-    - if: type = cron
+    - if: type in (pull_request, push)
       jdk: "openjdk11"
       script: ./tools/travis_controller.sh tests
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
       name: tests - jdk 11
-    - if: type = cron
+    - if: type in (pull_request, push)
       jdk: "openjdk11"
       script: ./tools/travis_controller.sh misc
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
       name: misc
-    - if: type = cron
+    - if: type in (pull_request, push)
       jdk: "openjdk11"
       stage: cleanup
       script: ./tools/travis_controller.sh cleanup
@@ -403,17 +403,17 @@ jobs:
       script: ./tools/travis/nightly.sh split_checkpoints.sh
       name: checkpoints - jdk 9
     - # E2E profiles - Java 11
-    - if: type = cron
+    - if: type in (pull_request, push)
       stage: test
       jdk: "openjdk11"
       env: PROFILE="-De2e-metrics -Dinclude-kinesis -Djdk11"
       script: ./tools/travis/nightly.sh split_ha.sh
       name: ha - jdk11
-    - if: type = cron
+    - if: type in (pull_request, push)
       env: PROFILE="-Dinclude-kinesis -Djdk11"
       script: ./tools/travis/nightly.sh split_sticky.sh
       name: sticky - jdk 11
-    - if: type = cron
+    - if: type in (pull_request, push)
       env: PROFILE="-Dinclude-kinesis -Djdk11"
       script: ./tools/travis/nightly.sh split_checkpoints.sh
       name: checkpoints - jdk 11


[flink] 11/11: asm

Posted by ch...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch java11_test
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 423772b5522d6aa01261ba3ab9fc01510b7660d2
Author: Chesnay Schepler <ch...@apache.org>
AuthorDate: Wed Aug 28 11:39:42 2019 +0200

    asm
---
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index 006c376..932795aa 100644
--- a/pom.xml
+++ b/pom.xml
@@ -253,7 +253,7 @@ under the License.
 			<dependency>
 				<groupId>org.apache.flink</groupId>
 				<artifactId>flink-shaded-asm-7</artifactId>
-				<version>7.1-8.0</version>
+				<version>7.1-9.0</version>
 			</dependency>
 
 			<dependency>


[flink] 01/11: [FLINK-13457][travis] Setup JDK 11 builds

Posted by ch...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch java11_test
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 4e16897a61882608454a564c3864c69ca561a816
Author: Chesnay Schepler <ch...@apache.org>
AuthorDate: Mon Jul 29 08:56:33 2019 +0200

    [FLINK-13457][travis] Setup JDK 11 builds
---
 .travis.yml | 69 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 69 insertions(+)

diff --git a/.travis.yml b/.travis.yml
index f114152..c1869fd 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -250,6 +250,60 @@ jobs:
       script: ./tools/travis_controller.sh cleanup
       env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
       name: cleanup - jdk 9
+      # JDK11 profile
+    - if: type = cron
+      jdk: "openjdk11"
+      stage: compile
+      script: ./tools/travis_controller.sh compile
+      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
+      name: compile - jdk 11
+    - if: type = cron
+      jdk: "openjdk11"
+      stage: test
+      script: ./tools/travis_controller.sh core
+      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
+      name: core - jdk 11
+    - if: type = cron
+      jdk: "openjdk11"
+      script: ./tools/travis_controller.sh python
+      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
+      name: python - jdk 11
+    - if: type = cron
+      jdk: "openjdk11"
+      script: ./tools/travis_controller.sh libraries
+      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
+      name: libraries - jdk 11
+    - if: type = cron
+      jdk: "openjdk11"
+      script: ./tools/travis_controller.sh blink_planner
+      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
+      name: blink_planner - jdk 11
+    - if: type = cron
+      jdk: "openjdk11"
+      script: ./tools/travis_controller.sh connectors
+      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
+      name: connectors - jdk 11
+    - if: type = cron
+      jdk: "openjdk11"
+      script: ./tools/travis_controller.sh kafka/gelly
+      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
+      name: kafka/gelly - jdk 11
+    - if: type = cron
+      jdk: "openjdk11"
+      script: ./tools/travis_controller.sh tests
+      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
+      name: tests - jdk 11
+    - if: type = cron
+      jdk: "openjdk11"
+      script: ./tools/travis_controller.sh misc
+      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
+      name: misc
+    - if: type = cron
+      jdk: "openjdk11"
+      stage: cleanup
+      script: ./tools/travis_controller.sh cleanup
+      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
+      name: cleanup - jdk 11
     # Documentation 404 check
     - if: type = cron
       stage: test
@@ -348,3 +402,18 @@ jobs:
       env: PROFILE="-Dinclude-kinesis -Djdk9"
       script: ./tools/travis/nightly.sh split_checkpoints.sh
       name: checkpoints - jdk 9
+    - # E2E profiles - Java 11
+    - if: type = cron
+      stage: test
+      jdk: "openjdk11"
+      env: PROFILE="-De2e-metrics -Dinclude-kinesis -Djdk11"
+      script: ./tools/travis/nightly.sh split_ha.sh
+      name: ha - jdk11
+    - if: type = cron
+      env: PROFILE="-Dinclude-kinesis -Djdk11"
+      script: ./tools/travis/nightly.sh split_sticky.sh
+      name: sticky - jdk 11
+    - if: type = cron
+      env: PROFILE="-Dinclude-kinesis -Djdk11"
+      script: ./tools/travis/nightly.sh split_checkpoints.sh
+      name: checkpoints - jdk 11


[flink] 04/11: [TMP] setup custom flink-shaded install

Posted by ch...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch java11_test
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 63420880fd71170f1e35ce1307b53debc426e849
Author: Chesnay Schepler <ch...@apache.org>
AuthorDate: Mon Jul 29 12:29:24 2019 +0200

    [TMP] setup custom flink-shaded install
---
 tools/travis/nightly.sh    | 5 +++++
 tools/travis_controller.sh | 5 +++++
 tools/travis_watchdog.sh   | 5 +++++
 3 files changed, 15 insertions(+)

diff --git a/tools/travis/nightly.sh b/tools/travis/nightly.sh
index 433b925..44154f8 100755
--- a/tools/travis/nightly.sh
+++ b/tools/travis/nightly.sh
@@ -30,6 +30,11 @@ SCRIPT=$1
 source ${HERE}/setup_docker.sh
 source ${HERE}/setup_kubernetes.sh
 
+git clone https://github.com/zentol/flink-shaded.git --branch master --single-branch
+cd flink-shaded
+mvn clean install
+cd ..
+
 ARTIFACTS_DIR="${HERE}/artifacts"
 
 mkdir -p $ARTIFACTS_DIR || { echo "FAILURE: cannot create log directory '${ARTIFACTS_DIR}'." ; exit 1; }
diff --git a/tools/travis_controller.sh b/tools/travis_controller.sh
index c0f99f5..a0da514 100755
--- a/tools/travis_controller.sh
+++ b/tools/travis_controller.sh
@@ -80,6 +80,11 @@ function deleteOldCaches() {
 	done
 }
 
+git clone https://github.com/apache/flink-shaded.git --branch master --single-branch
+cd flink-shaded
+mvn clean install
+cd ..
+
 # delete leftover caches from previous builds
 find "$CACHE_DIR" -mindepth 1 -maxdepth 1 | grep -v "$TRAVIS_BUILD_NUMBER" | deleteOldCaches
 
diff --git a/tools/travis_watchdog.sh b/tools/travis_watchdog.sh
index 728bf3b..862ab75 100755
--- a/tools/travis_watchdog.sh
+++ b/tools/travis_watchdog.sh
@@ -27,6 +27,11 @@ fi
 
 source "${HERE}/travis/stage.sh"
 
+git clone https://github.com/apache/flink-shaded.git --branch master --single-branch
+cd flink-shaded
+mvn clean install
+cd ..
+
 ARTIFACTS_DIR="${HERE}/artifacts"
 
 mkdir -p $ARTIFACTS_DIR || { echo "FAILURE: cannot create log directory '${ARTIFACTS_DIR}'." ; exit 1; }


[flink] 05/11: harden StreamTaskTest

Posted by ch...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch java11_test
in repository https://gitbox.apache.org/repos/asf/flink.git

commit cde5e9881d1b731d4c13d963f3d5f625a7cbbfc8
Author: Chesnay Schepler <ch...@apache.org>
AuthorDate: Tue Jul 30 15:06:01 2019 +0200

    harden StreamTaskTest
---
 .../streaming/runtime/tasks/StreamTaskTest.java    | 44 ++++++++--------------
 1 file changed, 16 insertions(+), 28 deletions(-)

diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/tasks/StreamTaskTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/tasks/StreamTaskTest.java
index 946895a..f773a1a 100644
--- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/tasks/StreamTaskTest.java
+++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/tasks/StreamTaskTest.java
@@ -125,7 +125,6 @@ import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.Timeout;
-import org.mockito.ArgumentCaptor;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
@@ -482,26 +481,27 @@ public class StreamTaskTest extends TestLogger {
 	@Test
 	public void testAsyncCheckpointingConcurrentCloseAfterAcknowledge() throws Exception {
 
-		final OneShotLatch acknowledgeCheckpointLatch = new OneShotLatch();
+		final CompletableFuture<TaskStateSnapshot> snapshotFuture = new CompletableFuture<>();
 		final OneShotLatch completeAcknowledge = new OneShotLatch();
 
-		CheckpointResponder checkpointResponder = mock(CheckpointResponder.class);
-		doAnswer(new Answer() {
+		CheckpointResponder checkpointResponder = new CheckpointResponder() {
 			@Override
-			public Object answer(InvocationOnMock invocation) throws Throwable {
-				acknowledgeCheckpointLatch.trigger();
+			public void acknowledgeCheckpoint(JobID jobID, ExecutionAttemptID executionAttemptID, long checkpointId, CheckpointMetrics checkpointMetrics, TaskStateSnapshot subtaskState) {
+				snapshotFuture.complete(subtaskState);
 
 				// block here so that we can issue the concurrent cancel call
-				completeAcknowledge.await();
-
-				return null;
+				try {
+					completeAcknowledge.await();
+				} catch (InterruptedException e) {
+					Thread.currentThread().interrupt();
+				}
 			}
-		}).when(checkpointResponder).acknowledgeCheckpoint(
-			any(JobID.class),
-			any(ExecutionAttemptID.class),
-			anyLong(),
-			any(CheckpointMetrics.class),
-			any(TaskStateSnapshot.class));
+
+			@Override
+			public void declineCheckpoint(JobID jobID, ExecutionAttemptID executionAttemptID, long checkpointId, Throwable cause) {
+				throw new UnsupportedOperationException();
+
+			}};
 
 		TaskStateManager taskStateManager = new TaskStateManagerImpl(
 			new JobID(1L, 2L),
@@ -539,19 +539,7 @@ public class StreamTaskTest extends TestLogger {
 				CheckpointOptions.forCheckpointWithDefaultLocation(),
 				false);
 
-			acknowledgeCheckpointLatch.await();
-
-			ArgumentCaptor<TaskStateSnapshot> subtaskStateCaptor = ArgumentCaptor.forClass(TaskStateSnapshot.class);
-
-			// check that the checkpoint has been completed
-			verify(checkpointResponder).acknowledgeCheckpoint(
-				any(JobID.class),
-				any(ExecutionAttemptID.class),
-				eq(checkpointId),
-				any(CheckpointMetrics.class),
-				subtaskStateCaptor.capture());
-
-			TaskStateSnapshot subtaskStates = subtaskStateCaptor.getValue();
+			TaskStateSnapshot subtaskStates = snapshotFuture.get();
 			OperatorSubtaskState subtaskState = subtaskStates.getSubtaskStateMappings().iterator().next().getValue();
 
 			// check that the subtask state contains the expected state handles
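
The gist of the hardening above: a Mockito Answer, an ArgumentCaptor, and a trigger latch are replaced by a hand-written CheckpointResponder whose acknowledge callback completes a CompletableFuture, so the test thread simply blocks on snapshotFuture.get() instead of coordinating latches and verify() calls. A condensed sketch of that handoff pattern; Responder here is a hypothetical stand-in for CheckpointResponder:

    import java.util.concurrent.CompletableFuture;

    public class FutureHandoffSketch {

        // Hypothetical stand-in for CheckpointResponder.
        interface Responder {
            void acknowledge(String snapshot);
        }

        public static void main(String[] args) throws Exception {
            CompletableFuture<String> snapshotFuture = new CompletableFuture<>();

            // The stub hands its argument straight to the waiting test thread.
            Responder responder = snapshotFuture::complete;

            // In the real test this is invoked by the checkpointing machinery.
            responder.acknowledge("subtask-state");

            // Replaces acknowledgeCheckpointLatch.await(), verify(...), and
            // ArgumentCaptor.getValue() from the old version in one call.
            System.out.println("received: " + snapshotFuture.get());
        }
    }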