Posted to commits@flink.apache.org by dw...@apache.org on 2021/11/05 08:48:28 UTC

[flink-benchmarks] 01/02: [hotfix] Reformat java code

This is an automated email from the ASF dual-hosted git repository.

dwysakowicz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink-benchmarks.git

commit 17013937df264f927590a855abb90c4ace2a41d2
Author: Piotr Nowojski <pi...@gmail.com>
AuthorDate: Mon Nov 1 18:00:56 2021 +0100

    [hotfix] Reformat java code
---
 .../benchmark/AsyncWaitOperatorBenchmark.java      |  129 +-
 .../benchmark/BlockingPartitionBenchmark.java      |  180 +-
 .../BlockingPartitionRemoteChannelBenchmark.java   |   23 +-
 .../benchmark/CheckpointEnvironmentContext.java    |   72 +-
 .../org/apache/flink/benchmark/CollectSink.java    |   14 +-
 .../ContinuousFileReaderOperatorBenchmark.java     |   38 +-
 .../flink/benchmark/FlinkEnvironmentContext.java   |   30 +-
 .../org/apache/flink/benchmark/InputBenchmark.java |   71 +-
 .../apache/flink/benchmark/KeyByBenchmarks.java    |  134 +-
 .../benchmark/MemoryStateBackendBenchmark.java     |   47 +-
 .../flink/benchmark/MultipleInputBenchmark.java    |  283 +--
 .../flink/benchmark/RemoteBenchmarkBase.java       |   57 +-
 .../benchmark/RocksStateBackendBenchmark.java      |   63 +-
 .../SerializationFrameworkMiniBenchmarks.java      |  960 ++++----
 .../benchmark/SortingBoundedInputBenchmarks.java   |   28 +-
 .../flink/benchmark/StateBackendBenchmarkBase.java |  111 +-
 .../apache/flink/benchmark/StreamGraphUtils.java   |   23 +-
 .../apache/flink/benchmark/TwoInputBenchmark.java  |  123 +-
 .../apache/flink/benchmark/WindowBenchmarks.java   |   82 +-
 .../benchmark/full/PojoSerializationBenchmark.java |   73 +-
 .../full/SerializationFrameworkAllBenchmarks.java  |  554 ++---
 .../full/StringSerializationBenchmark.java         |   36 +-
 .../functions/BaseSourceWithKeyRange.java          |   77 +-
 .../benchmark/functions/IntLongApplications.java   |    3 +-
 .../benchmark/functions/IntegerLongSource.java     |   56 +-
 .../flink/benchmark/functions/LongNewSource.java   |   45 +-
 .../flink/benchmark/functions/LongSourceType.java  |   51 +-
 .../benchmark/functions/MultiplyIntLongByTwo.java  |    3 +-
 .../benchmark/functions/QueuingLongSource.java     |   46 +-
 .../benchmark/functions/SuccessException.java      |    3 +-
 .../benchmark/functions/SumReduceIntLong.java      |    3 +-
 .../flink/benchmark/functions/TestUtils.java       |   13 +-
 .../operators/MultiplyByTwoCoStreamMap.java        |   21 +-
 .../operators/MultiplyByTwoOperatorFactory.java    |   60 +-
 .../flink/benchmark/operators/RecordSource.java    |   29 +-
 .../apache/flink/benchmark/thrift/MyOperation.java |  892 ++++----
 .../org/apache/flink/benchmark/thrift/MyPojo.java  | 2322 ++++++++++----------
 .../benchmark/SchedulerBenchmarkExecutorBase.java  |   33 +-
 ...DownstreamTasksInBatchJobBenchmarkExecutor.java |   47 +-
 ...loyingTasksInStreamingJobBenchmarkExecutor.java |   44 +-
 .../e2e/CreateSchedulerBenchmarkExecutor.java      |   44 +-
 .../SchedulingAndDeployingBenchmarkExecutor.java   |   44 +-
 ...RegionToRestartInBatchJobBenchmarkExecutor.java |   44 +-
 ...onToRestartInStreamingJobBenchmarkExecutor.java |   44 +-
 ...artitionReleaseInBatchJobBenchmarkExecutor.java |   44 +-
 .../InitSchedulingStrategyBenchmarkExecutor.java   |   44 +-
 ...DownstreamTasksInBatchJobBenchmarkExecutor.java |   59 +-
 .../BuildExecutionGraphBenchmarkExecutor.java      |   44 +-
 .../flink/state/benchmark/ListStateBenchmark.java  |   24 +-
 .../flink/state/benchmark/MapStateBenchmark.java   |   20 +-
 .../flink/state/benchmark/StateBenchmarkBase.java  |   33 +-
 .../state/benchmark/StateBenchmarkConstants.java   |   16 +-
 .../flink/state/benchmark/ValueStateBenchmark.java |   18 +-
 ...ewStreamNetworkThroughputBenchmarkExecutor.java |   67 +-
 ...etworkBroadcastThroughputBenchmarkExecutor.java |   63 +-
 .../StreamNetworkLatencyBenchmarkExecutor.java     |   60 +-
 .../StreamNetworkThroughputBenchmarkExecutor.java  |  173 +-
 57 files changed, 3955 insertions(+), 3765 deletions(-)

diff --git a/src/main/java/org/apache/flink/benchmark/AsyncWaitOperatorBenchmark.java b/src/main/java/org/apache/flink/benchmark/AsyncWaitOperatorBenchmark.java
index f3fae5d..b61fd0c 100644
--- a/src/main/java/org/apache/flink/benchmark/AsyncWaitOperatorBenchmark.java
+++ b/src/main/java/org/apache/flink/benchmark/AsyncWaitOperatorBenchmark.java
@@ -44,72 +44,65 @@ import java.util.concurrent.TimeUnit;
 
 @OperationsPerInvocation(value = AsyncWaitOperatorBenchmark.RECORDS_PER_INVOCATION)
 public class AsyncWaitOperatorBenchmark extends BenchmarkBase {
-	public static final int RECORDS_PER_INVOCATION = 1_000_000;
-
-	private static final long CHECKPOINT_INTERVAL_MS = 100;
-
-	private static ExecutorService executor;
-
-	@Param
-	public AsyncDataStream.OutputMode outputMode;
-
-	public static void main(String[] args)
-		throws RunnerException {
-		Options options = new OptionsBuilder()
-			.verbosity(VerboseMode.NORMAL)
-			.include(".*" + AsyncWaitOperatorBenchmark.class.getCanonicalName() + ".*")
-			.build();
-
-		new Runner(options).run();
-	}
-
-	@Setup
-	public void setUp() {
-		executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
-	}
-
-	@TearDown
-	public void tearDown() {
-		executor.shutdown();
-	}
-
-	@Benchmark
-	public void asyncWait(FlinkEnvironmentContext context) throws Exception {
-
-		StreamExecutionEnvironment env = context.env;
-		env.enableCheckpointing(CHECKPOINT_INTERVAL_MS);
-		env.setParallelism(1);
-
-		DataStreamSource<Long> source = env.addSource(new LongSource(RECORDS_PER_INVOCATION));
-		DataStream<Long> result = createAsyncOperator(source);
-		result.addSink(new DiscardingSink<>());
-
-		env.execute();
-	}
-
-	private DataStream<Long> createAsyncOperator(DataStreamSource<Long> source) {
-		switch (outputMode) {
-			case ORDERED:
-				return AsyncDataStream.orderedWait(
-						source,
-						new BenchmarkAsyncFunctionExecutor(),
-						0,
-						TimeUnit.MILLISECONDS);
-			case UNORDERED:
-				return AsyncDataStream.unorderedWait(
-						source,
-						new BenchmarkAsyncFunctionExecutor(),
-						0,
-						TimeUnit.MILLISECONDS);
-			default:
-				throw new UnsupportedOperationException("Unknown mode");
-		}
-	}
-
-	private static class BenchmarkAsyncFunctionExecutor extends RichAsyncFunction<Long, Long> {
-		@Override
-		public void asyncInvoke(Long input, ResultFuture<Long> resultFuture) {
-			executor.execute(() -> resultFuture.complete(Collections.singleton(input * 2)));
-		}
-	}
+    public static final int RECORDS_PER_INVOCATION = 1_000_000;
+
+    private static final long CHECKPOINT_INTERVAL_MS = 100;
+
+    private static ExecutorService executor;
+
+    @Param public AsyncDataStream.OutputMode outputMode;
+
+    public static void main(String[] args) throws RunnerException {
+        Options options =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(".*" + AsyncWaitOperatorBenchmark.class.getCanonicalName() + ".*")
+                        .build();
+
+        new Runner(options).run();
+    }
+
+    @Setup
+    public void setUp() {
+        executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
+    }
+
+    @TearDown
+    public void tearDown() {
+        executor.shutdown();
+    }
+
+    @Benchmark
+    public void asyncWait(FlinkEnvironmentContext context) throws Exception {
+
+        StreamExecutionEnvironment env = context.env;
+        env.enableCheckpointing(CHECKPOINT_INTERVAL_MS);
+        env.setParallelism(1);
+
+        DataStreamSource<Long> source = env.addSource(new LongSource(RECORDS_PER_INVOCATION));
+        DataStream<Long> result = createAsyncOperator(source);
+        result.addSink(new DiscardingSink<>());
+
+        env.execute();
+    }
+
+    private DataStream<Long> createAsyncOperator(DataStreamSource<Long> source) {
+        switch (outputMode) {
+            case ORDERED:
+                return AsyncDataStream.orderedWait(
+                        source, new BenchmarkAsyncFunctionExecutor(), 0, TimeUnit.MILLISECONDS);
+            case UNORDERED:
+                return AsyncDataStream.unorderedWait(
+                        source, new BenchmarkAsyncFunctionExecutor(), 0, TimeUnit.MILLISECONDS);
+            default:
+                throw new UnsupportedOperationException("Unknown mode");
+        }
+    }
+
+    private static class BenchmarkAsyncFunctionExecutor extends RichAsyncFunction<Long, Long> {
+        @Override
+        public void asyncInvoke(Long input, ResultFuture<Long> resultFuture) {
+            executor.execute(() -> resultFuture.complete(Collections.singleton(input * 2)));
+        }
+    }
 }
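
For context, separate from the commit itself: the ordered/unordered split this benchmark parameterizes comes from Flink's AsyncDataStream API. A minimal, self-contained sketch of the same pattern (class and job names here are illustrative, not part of the repository):

    import org.apache.flink.streaming.api.datastream.AsyncDataStream;
    import org.apache.flink.streaming.api.datastream.DataStream;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
    import org.apache.flink.streaming.api.functions.async.ResultFuture;
    import org.apache.flink.streaming.api.functions.async.RichAsyncFunction;

    import java.util.Collections;
    import java.util.concurrent.TimeUnit;

    public class AsyncWaitSketch {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            DataStream<Long> source = env.fromSequence(0, 1_000);

            // orderedWait preserves input order at the cost of buffering;
            // AsyncDataStream.unorderedWait emits results as soon as they complete.
            DataStream<Long> doubled =
                    AsyncDataStream.orderedWait(
                            source, new DoublingFunction(), 1, TimeUnit.SECONDS);

            doubled.print();
            env.execute("async-wait-sketch");
        }

        private static class DoublingFunction extends RichAsyncFunction<Long, Long> {
            @Override
            public void asyncInvoke(Long input, ResultFuture<Long> resultFuture) {
                // Completes immediately; a real function would call an external service.
                resultFuture.complete(Collections.singleton(input * 2));
            }
        }
    }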
diff --git a/src/main/java/org/apache/flink/benchmark/BlockingPartitionBenchmark.java b/src/main/java/org/apache/flink/benchmark/BlockingPartitionBenchmark.java
index 4bc03ad..7a30525 100644
--- a/src/main/java/org/apache/flink/benchmark/BlockingPartitionBenchmark.java
+++ b/src/main/java/org/apache/flink/benchmark/BlockingPartitionBenchmark.java
@@ -33,93 +33,101 @@ import org.openjdk.jmh.runner.options.Options;
 import org.openjdk.jmh.runner.options.OptionsBuilder;
 import org.openjdk.jmh.runner.options.VerboseMode;
 
-/**
- * JMH throughput benchmark runner.
- */
+/** JMH throughput benchmark runner. */
 @OperationsPerInvocation(value = BlockingPartitionBenchmark.RECORDS_PER_INVOCATION)
 public class BlockingPartitionBenchmark extends BenchmarkBase {
 
-	public static final int RECORDS_PER_INVOCATION = 15_000_000;
-
-	public static void main(String[] args)
-			throws RunnerException {
-		Options options = new OptionsBuilder()
-				.verbosity(VerboseMode.NORMAL)
-				.include(".*" + BlockingPartitionBenchmark.class.getCanonicalName() + ".*")
-				.build();
-
-		new Runner(options).run();
-	}
-
-	@Benchmark
-	public void uncompressedFilePartition(UncompressedFileEnvironmentContext context) throws Exception {
-		executeBenchmark(context.env);
-	}
-
-	@Benchmark
-	public void compressedFilePartition(CompressedFileEnvironmentContext context) throws Exception {
-		executeBenchmark(context.env);
-	}
-
-	@Benchmark
-	public void uncompressedMmapPartition(UncompressedMmapEnvironmentContext context) throws Exception {
-		executeBenchmark(context.env);
-	}
-
-	private void executeBenchmark(StreamExecutionEnvironment env) throws Exception {
-		StreamGraph streamGraph = StreamGraphUtils.buildGraphForBatchJob(env, RECORDS_PER_INVOCATION);
-		env.execute(streamGraph);
-	}
-
-	/**
-	 * Setup for the benchmark(s).
-	 */
-	public static class BlockingPartitionEnvironmentContext extends FlinkEnvironmentContext {
-
-		/**
-		 * Parallelism of 1 causes the reads/writes to be always sequential and only covers the case
-		 * of one reader. More parallelism should be more suitable for finding performance regressions
-		 * of the code. Considering that the benchmarking machine has 4 CPU cores, we set the parallelism
-		 * to 4.
-		 */
-		private final int parallelism = 4;
-
-		@Override
-		public void setUp() throws Exception {
-			super.setUp();
-
-			env.setParallelism(parallelism);
-			env.setBufferTimeout(-1);
-		}
-
-		protected Configuration createConfiguration(boolean compressionEnabled, String subpartitionType) {
-			Configuration configuration = super.createConfiguration();
-
-			configuration.setBoolean(NettyShuffleEnvironmentOptions.BLOCKING_SHUFFLE_COMPRESSION_ENABLED, compressionEnabled);
-			configuration.setString(NettyShuffleEnvironmentOptions.NETWORK_BLOCKING_SHUFFLE_TYPE, subpartitionType);
-			configuration.setString(CoreOptions.TMP_DIRS, FileUtils.getCurrentWorkingDirectory().toAbsolutePath().toString());
-			return configuration;
-		}
-	}
-
-	public static class UncompressedFileEnvironmentContext extends BlockingPartitionEnvironmentContext {
-		@Override
-		protected Configuration createConfiguration() {
-			return createConfiguration(false, "file");
-		}
-	}
-
-	public static class CompressedFileEnvironmentContext extends BlockingPartitionEnvironmentContext {
-		@Override
-		protected Configuration createConfiguration() {
-			return createConfiguration(true, "file");
-		}
-	}
-
-	public static class UncompressedMmapEnvironmentContext extends BlockingPartitionEnvironmentContext {
-		@Override
-		protected Configuration createConfiguration() {
-			return createConfiguration(false, "mmap");
-		}
-	}
+    public static final int RECORDS_PER_INVOCATION = 15_000_000;
+
+    public static void main(String[] args) throws RunnerException {
+        Options options =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(".*" + BlockingPartitionBenchmark.class.getCanonicalName() + ".*")
+                        .build();
+
+        new Runner(options).run();
+    }
+
+    @Benchmark
+    public void uncompressedFilePartition(UncompressedFileEnvironmentContext context)
+            throws Exception {
+        executeBenchmark(context.env);
+    }
+
+    @Benchmark
+    public void compressedFilePartition(CompressedFileEnvironmentContext context) throws Exception {
+        executeBenchmark(context.env);
+    }
+
+    @Benchmark
+    public void uncompressedMmapPartition(UncompressedMmapEnvironmentContext context)
+            throws Exception {
+        executeBenchmark(context.env);
+    }
+
+    private void executeBenchmark(StreamExecutionEnvironment env) throws Exception {
+        StreamGraph streamGraph =
+                StreamGraphUtils.buildGraphForBatchJob(env, RECORDS_PER_INVOCATION);
+        env.execute(streamGraph);
+    }
+
+    /** Setup for the benchmark(s). */
+    public static class BlockingPartitionEnvironmentContext extends FlinkEnvironmentContext {
+
+        /**
+         * Parallelism of 1 causes the reads/writes to be always sequential and only covers the case
+         * of one reader. More parallelism should be more suitable for finding performance
+         * regressions of the code. Considering that the benchmarking machine has 4 CPU cores, we
+         * set the parallelism to 4.
+         */
+        private final int parallelism = 4;
+
+        @Override
+        public void setUp() throws Exception {
+            super.setUp();
+
+            env.setParallelism(parallelism);
+            env.setBufferTimeout(-1);
+        }
+
+        protected Configuration createConfiguration(
+                boolean compressionEnabled, String subpartitionType) {
+            Configuration configuration = super.createConfiguration();
+
+            configuration.setBoolean(
+                    NettyShuffleEnvironmentOptions.BLOCKING_SHUFFLE_COMPRESSION_ENABLED,
+                    compressionEnabled);
+            configuration.setString(
+                    NettyShuffleEnvironmentOptions.NETWORK_BLOCKING_SHUFFLE_TYPE, subpartitionType);
+            configuration.setString(
+                    CoreOptions.TMP_DIRS,
+                    FileUtils.getCurrentWorkingDirectory().toAbsolutePath().toString());
+            return configuration;
+        }
+    }
+
+    public static class UncompressedFileEnvironmentContext
+            extends BlockingPartitionEnvironmentContext {
+        @Override
+        protected Configuration createConfiguration() {
+            return createConfiguration(false, "file");
+        }
+    }
+
+    public static class CompressedFileEnvironmentContext
+            extends BlockingPartitionEnvironmentContext {
+        @Override
+        protected Configuration createConfiguration() {
+            return createConfiguration(true, "file");
+        }
+    }
+
+    public static class UncompressedMmapEnvironmentContext
+            extends BlockingPartitionEnvironmentContext {
+        @Override
+        protected Configuration createConfiguration() {
+            return createConfiguration(false, "mmap");
+        }
+    }
 }
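
Aside, not part of the commit: the three environment contexts above differ only in the shuffle configuration they build. A sketch of that configuration assembled directly, using the same option constants as the diff (the class and helper names are illustrative):

    import org.apache.flink.configuration.Configuration;
    import org.apache.flink.configuration.CoreOptions;
    import org.apache.flink.configuration.NettyShuffleEnvironmentOptions;

    public class BlockingShuffleConfigSketch {
        // Produces the three variants benchmarked above:
        // (false, "file"), (true, "file") and (false, "mmap").
        static Configuration blockingShuffleConfig(boolean compressed, String subpartitionType) {
            Configuration configuration = new Configuration();
            configuration.setBoolean(
                    NettyShuffleEnvironmentOptions.BLOCKING_SHUFFLE_COMPRESSION_ENABLED,
                    compressed);
            configuration.setString(
                    NettyShuffleEnvironmentOptions.NETWORK_BLOCKING_SHUFFLE_TYPE, subpartitionType);
            // Spill blocking partitions to the JVM temp dir for this sketch.
            configuration.setString(CoreOptions.TMP_DIRS, System.getProperty("java.io.tmpdir"));
            return configuration;
        }
    }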
diff --git a/src/main/java/org/apache/flink/benchmark/BlockingPartitionRemoteChannelBenchmark.java b/src/main/java/org/apache/flink/benchmark/BlockingPartitionRemoteChannelBenchmark.java
index e5d3397..8752798 100644
--- a/src/main/java/org/apache/flink/benchmark/BlockingPartitionRemoteChannelBenchmark.java
+++ b/src/main/java/org/apache/flink/benchmark/BlockingPartitionRemoteChannelBenchmark.java
@@ -39,10 +39,11 @@ public class BlockingPartitionRemoteChannelBenchmark extends RemoteBenchmarkBase
     private static final int NUM_VERTICES = 2;
 
     public static void main(String[] args) throws RunnerException {
-        Options options = new OptionsBuilder()
-            .verbosity(VerboseMode.NORMAL)
-            .include(BlockingPartitionRemoteChannelBenchmark.class.getCanonicalName())
-            .build();
+        Options options =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(BlockingPartitionRemoteChannelBenchmark.class.getCanonicalName())
+                        .build();
 
         new Runner(options).run();
     }
@@ -54,13 +55,12 @@ public class BlockingPartitionRemoteChannelBenchmark extends RemoteBenchmarkBase
 
     @Benchmark
     public void remoteFilePartition(BlockingPartitionEnvironmentContext context) throws Exception {
-        StreamGraph streamGraph = StreamGraphUtils.buildGraphForBatchJob(context.env, RECORDS_PER_INVOCATION);
+        StreamGraph streamGraph =
+                StreamGraphUtils.buildGraphForBatchJob(context.env, RECORDS_PER_INVOCATION);
         miniCluster.executeJobBlocking(StreamingJobGraphGenerator.createJobGraph(streamGraph));
     }
 
-    /**
-     * Environment context for specific file based bounded blocking partition.
-     */
+    /** Environment context for specific file based bounded blocking partition. */
     public static class BlockingPartitionEnvironmentContext extends FlinkEnvironmentContext {
 
         @Override
@@ -75,8 +75,11 @@ public class BlockingPartitionRemoteChannelBenchmark extends RemoteBenchmarkBase
         protected Configuration createConfiguration() {
             Configuration configuration = super.createConfiguration();
 
-            configuration.setString(NettyShuffleEnvironmentOptions.NETWORK_BLOCKING_SHUFFLE_TYPE, "file");
-            configuration.setString(CoreOptions.TMP_DIRS, FileUtils.getCurrentWorkingDirectory().toAbsolutePath().toString());
+            configuration.setString(
+                    NettyShuffleEnvironmentOptions.NETWORK_BLOCKING_SHUFFLE_TYPE, "file");
+            configuration.setString(
+                    CoreOptions.TMP_DIRS,
+                    FileUtils.getCurrentWorkingDirectory().toAbsolutePath().toString());
             return configuration;
         }
     }
diff --git a/src/main/java/org/apache/flink/benchmark/CheckpointEnvironmentContext.java b/src/main/java/org/apache/flink/benchmark/CheckpointEnvironmentContext.java
index c70ac62..374994f 100644
--- a/src/main/java/org/apache/flink/benchmark/CheckpointEnvironmentContext.java
+++ b/src/main/java/org/apache/flink/benchmark/CheckpointEnvironmentContext.java
@@ -50,6 +50,42 @@ public abstract class CheckpointEnvironmentContext extends FlinkEnvironmentConte
 
     public JobID jobID;
 
+    @Override
+    public void setUp() throws Exception {
+        super.setUp();
+        env.setParallelism(CheckpointEnvironmentContext.JOB_PARALLELISM);
+        env.enableCheckpointing(Long.MAX_VALUE);
+
+        final StreamGraphWithSources streamGraphWithSources = getStreamGraph();
+
+        final JobClient jobClient = env.executeAsync(streamGraphWithSources.getStreamGraph());
+        jobID = jobClient.getJobID();
+        CommonTestUtils.waitForAllTaskRunning(miniCluster, jobID, false);
+        BackpressureUtils.waitForBackpressure(
+                jobID,
+                streamGraphWithSources.getSources(),
+                miniCluster.getRestAddress().get(),
+                miniCluster.getConfiguration());
+        if (getSleepPostSetUp() > 0) {
+            Thread.sleep(getSleepPostSetUp());
+        }
+    }
+
+    protected abstract CheckpointMode getMode();
+
+    protected abstract StreamGraphWithSources getStreamGraph();
+
+    protected int getSleepPostSetUp() {
+        return getMode() == CheckpointMode.ALIGNED
+                ? CheckpointEnvironmentContext.DEBLOATING_STABILIZATION_PERIOD
+                : 0;
+    }
+
+    @Override
+    protected Configuration createConfiguration() {
+        return getMode().configure(super.createConfiguration());
+    }
+
     /**
      * Checkpointing configuration to be used in {@link CheckpointingTimeBenchmark} & {@link
      * MultiInputCheckpointingTimeBenchmark}.
@@ -109,42 +145,6 @@ public abstract class CheckpointEnvironmentContext extends FlinkEnvironmentConte
         }
     }
 
-    @Override
-    public void setUp() throws Exception {
-        super.setUp();
-        env.setParallelism(CheckpointEnvironmentContext.JOB_PARALLELISM);
-        env.enableCheckpointing(Long.MAX_VALUE);
-
-        final StreamGraphWithSources streamGraphWithSources = getStreamGraph();
-
-        final JobClient jobClient = env.executeAsync(streamGraphWithSources.getStreamGraph());
-        jobID = jobClient.getJobID();
-        CommonTestUtils.waitForAllTaskRunning(miniCluster, jobID, false);
-        BackpressureUtils.waitForBackpressure(
-                jobID,
-                streamGraphWithSources.getSources(),
-                miniCluster.getRestAddress().get(),
-                miniCluster.getConfiguration());
-        if (getSleepPostSetUp() > 0) {
-            Thread.sleep(getSleepPostSetUp());
-        }
-    }
-
-    protected abstract CheckpointMode getMode();
-
-    protected abstract StreamGraphWithSources getStreamGraph();
-
-    protected int getSleepPostSetUp() {
-        return getMode() == CheckpointMode.ALIGNED
-                ? CheckpointEnvironmentContext.DEBLOATING_STABILIZATION_PERIOD
-                : 0;
-    }
-
-    @Override
-    protected Configuration createConfiguration() {
-        return getMode().configure(super.createConfiguration());
-    }
-
     /** A simple wrapper to pass a {@link StreamGraph} along with ids of sources it contains. */
     public static class StreamGraphWithSources {
         private final StreamGraph streamGraph;
diff --git a/src/main/java/org/apache/flink/benchmark/CollectSink.java b/src/main/java/org/apache/flink/benchmark/CollectSink.java
index 4189747..ba1b82c 100644
--- a/src/main/java/org/apache/flink/benchmark/CollectSink.java
+++ b/src/main/java/org/apache/flink/benchmark/CollectSink.java
@@ -23,14 +23,12 @@ import org.apache.flink.streaming.api.functions.sink.SinkFunction;
 import java.util.ArrayList;
 import java.util.List;
 
-/**
- * Created by pnowojski on 7/5/17.
- */
+/** Created by pnowojski on 7/5/17. */
 public class CollectSink<T> implements SinkFunction<T> {
-	public final List<T> result = new ArrayList<>();
+    public final List<T> result = new ArrayList<>();
 
-	@Override
-	public void invoke(T value) throws Exception {
-		result.add(value);
-	}
+    @Override
+    public void invoke(T value) throws Exception {
+        result.add(value);
+    }
 }
diff --git a/src/main/java/org/apache/flink/benchmark/ContinuousFileReaderOperatorBenchmark.java b/src/main/java/org/apache/flink/benchmark/ContinuousFileReaderOperatorBenchmark.java
index 09ca8e3..a829823 100644
--- a/src/main/java/org/apache/flink/benchmark/ContinuousFileReaderOperatorBenchmark.java
+++ b/src/main/java/org/apache/flink/benchmark/ContinuousFileReaderOperatorBenchmark.java
@@ -48,19 +48,26 @@ public class ContinuousFileReaderOperatorBenchmark extends BenchmarkBase {
     private static final int LINES_PER_SPLIT = 175_000;
     public static final int RECORDS_PER_INVOCATION = SPLITS_PER_INVOCATION * LINES_PER_SPLIT;
 
-    private static final TimestampedFileInputSplit SPLIT = new TimestampedFileInputSplit(0, 0, new Path("."), 0, 0, new String[]{});
+    private static final TimestampedFileInputSplit SPLIT =
+            new TimestampedFileInputSplit(0, 0, new Path("."), 0, 0, new String[] {});
     private static final String LINE = Strings.repeat('0', 10);
 
-    // Source should wait until all elements reach sink. Otherwise, END_OF_INPUT is sent once all splits are emitted.
-    // Thus, all subsequent reads in ContinuousFileReaderOperator would be made in CLOSING state in a simple while-true loop (MailboxExecutor.isIdle is always true).
+    // Source should wait until all elements reach sink. Otherwise, END_OF_INPUT is sent once all
+    // splits are emitted.
+    // Thus, all subsequent reads in ContinuousFileReaderOperator would be made in CLOSING state in
+    // a simple while-true loop (MailboxExecutor.isIdle is always true).
     private static OneShotLatch TARGET_COUNT_REACHED_LATCH = new OneShotLatch();
 
-    public static void main(String[] args)
-            throws RunnerException {
-        Options options = new OptionsBuilder()
-                .verbosity(VerboseMode.NORMAL)
-                .include(".*" + ContinuousFileReaderOperatorBenchmark.class.getCanonicalName() + ".*")
-                .build();
+    public static void main(String[] args) throws RunnerException {
+        Options options =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(
+                                ".*"
+                                        + ContinuousFileReaderOperatorBenchmark.class
+                                                .getCanonicalName()
+                                        + ".*")
+                        .build();
 
         new Runner(options).run();
     }
@@ -70,11 +77,12 @@ public class ContinuousFileReaderOperatorBenchmark extends BenchmarkBase {
         TARGET_COUNT_REACHED_LATCH.reset();
         StreamExecutionEnvironment env = context.env;
         env.setRestartStrategy(new RestartStrategies.NoRestartStrategyConfiguration());
-        env
-                .enableCheckpointing(100)
+        env.enableCheckpointing(100)
                 .setParallelism(1)
                 .addSource(new MockSourceFunction())
-                .transform("fileReader", TypeInformation.of(String.class),
+                .transform(
+                        "fileReader",
+                        TypeInformation.of(String.class),
                         new ContinuousFileReaderOperatorFactory<>(new MockInputFormat()))
                 .addSink(new LimitedSink());
 
@@ -102,7 +110,7 @@ public class ContinuousFileReaderOperatorBenchmark extends BenchmarkBase {
                         Thread.currentThread().interrupt();
                     }
                 } catch (TimeoutException e) {
-                	// continue waiting
+                    // continue waiting
                 }
             }
         }
@@ -144,8 +152,8 @@ public class ContinuousFileReaderOperatorBenchmark extends BenchmarkBase {
 
         @Override
         public void invoke(String value, Context context) {
-        	if (++count == RECORDS_PER_INVOCATION) {
-        	    TARGET_COUNT_REACHED_LATCH.trigger();
+            if (++count == RECORDS_PER_INVOCATION) {
+                TARGET_COUNT_REACHED_LATCH.trigger();
             }
         }
     }
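
As a side note: the benchmark above coordinates its source and sink through Flink's OneShotLatch test utility. A minimal sketch of its trigger/await semantics (the class name is illustrative):

    import org.apache.flink.core.testutils.OneShotLatch;

    public class OneShotLatchSketch {
        public static void main(String[] args) throws Exception {
            OneShotLatch targetCountReached = new OneShotLatch();

            Thread sink = new Thread(() -> {
                // ... consume records, then signal completion exactly once:
                targetCountReached.trigger();
            });
            sink.start();

            targetCountReached.await(); // blocks until trigger() has been called
            sink.join();
        }
    }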
diff --git a/src/main/java/org/apache/flink/benchmark/FlinkEnvironmentContext.java b/src/main/java/org/apache/flink/benchmark/FlinkEnvironmentContext.java
index 60e1b0a..4935e68 100644
--- a/src/main/java/org/apache/flink/benchmark/FlinkEnvironmentContext.java
+++ b/src/main/java/org/apache/flink/benchmark/FlinkEnvironmentContext.java
@@ -39,12 +39,10 @@ import static org.openjdk.jmh.annotations.Scope.Thread;
 public class FlinkEnvironmentContext {
 
     public static final int NUM_NETWORK_BUFFERS = 1000;
-
-    public StreamExecutionEnvironment env;
-    public MiniCluster miniCluster;
-
     protected final int parallelism = 1;
     protected final boolean objectReuse = true;
+    public StreamExecutionEnvironment env;
+    public MiniCluster miniCluster;
 
     @Setup
     public void setUp() throws Exception {
@@ -52,11 +50,13 @@ public class FlinkEnvironmentContext {
             throw new RuntimeException("setUp was called multiple times!");
         }
         final Configuration clusterConfig = createConfiguration();
-        miniCluster = new MiniCluster(new MiniClusterConfiguration.Builder()
-            .setNumSlotsPerTaskManager(getNumberOfSlotsPerTaskManager())
-            .setNumTaskManagers(getNumberOfTaskManagers())
-            .setConfiguration(clusterConfig)
-            .build());
+        miniCluster =
+                new MiniCluster(
+                        new MiniClusterConfiguration.Builder()
+                                .setNumSlotsPerTaskManager(getNumberOfSlotsPerTaskManager())
+                                .setNumTaskManagers(getNumberOfTaskManagers())
+                                .setConfiguration(clusterConfig)
+                                .build());
 
         try {
             miniCluster.start();
@@ -64,10 +64,11 @@ public class FlinkEnvironmentContext {
             throw new RuntimeException(e);
         }
         // set up the execution environment
-        env = new StreamExecutionEnvironment(
-            new MiniClusterPipelineExecutorServiceLoader(miniCluster),
-            clusterConfig,
-            null);
+        env =
+                new StreamExecutionEnvironment(
+                        new MiniClusterPipelineExecutorServiceLoader(miniCluster),
+                        clusterConfig,
+                        null);
 
         env.setParallelism(parallelism);
         if (objectReuse) {
@@ -98,7 +99,8 @@ public class FlinkEnvironmentContext {
     protected Configuration createConfiguration() {
         final Configuration configuration = new Configuration();
         configuration.setString(RestOptions.BIND_PORT, "0");
-        configuration.setInteger(NettyShuffleEnvironmentOptions.NETWORK_NUM_BUFFERS, NUM_NETWORK_BUFFERS);
+        configuration.setInteger(
+                NettyShuffleEnvironmentOptions.NETWORK_NUM_BUFFERS, NUM_NETWORK_BUFFERS);
         configuration.set(DeploymentOptions.TARGET, MiniClusterPipelineExecutorServiceLoader.NAME);
         configuration.set(DeploymentOptions.ATTACHED, true);
         return configuration;
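
For readers unfamiliar with this class: FlinkEnvironmentContext wires a StreamExecutionEnvironment to an in-process MiniCluster. A stripped-down sketch of that lifecycle, using the same builder calls as the diff (the class name is illustrative):

    import org.apache.flink.configuration.Configuration;
    import org.apache.flink.runtime.minicluster.MiniCluster;
    import org.apache.flink.runtime.minicluster.MiniClusterConfiguration;

    public class MiniClusterSketch {
        public static void main(String[] args) throws Exception {
            MiniCluster miniCluster =
                    new MiniCluster(
                            new MiniClusterConfiguration.Builder()
                                    .setNumTaskManagers(1)
                                    .setNumSlotsPerTaskManager(1)
                                    .setConfiguration(new Configuration())
                                    .build());
            miniCluster.start();
            try {
                // Jobs submitted through an environment bound to this cluster run in-process,
                // which is what keeps the benchmarks free of external cluster noise.
            } finally {
                miniCluster.close();
            }
        }
    }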
diff --git a/src/main/java/org/apache/flink/benchmark/InputBenchmark.java b/src/main/java/org/apache/flink/benchmark/InputBenchmark.java
index d35d592..08b3fcf 100644
--- a/src/main/java/org/apache/flink/benchmark/InputBenchmark.java
+++ b/src/main/java/org/apache/flink/benchmark/InputBenchmark.java
@@ -35,51 +35,48 @@ import org.openjdk.jmh.runner.options.VerboseMode;
 
 @OperationsPerInvocation(value = InputBenchmark.RECORDS_PER_INVOCATION)
 public class InputBenchmark extends BenchmarkBase {
-	public static final int RECORDS_PER_INVOCATION = 15_000_000;
-	private static final long CHECKPOINT_INTERVAL_MS = 100;
+    public static final int RECORDS_PER_INVOCATION = 15_000_000;
+    private static final long CHECKPOINT_INTERVAL_MS = 100;
 
-	public static void main(String[] args)
-			throws RunnerException {
-		Options options = new OptionsBuilder()
-				.verbosity(VerboseMode.NORMAL)
-				.include(".*" + InputBenchmark.class.getCanonicalName() + ".*")
-				.build();
+    @Param({"LEGACY", "F27_UNBOUNDED"})
+    public LongSourceType sourceType;
 
-		new Runner(options).run();
-	}
+    public static void main(String[] args) throws RunnerException {
+        Options options =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(".*" + InputBenchmark.class.getCanonicalName() + ".*")
+                        .build();
 
-	@Param({"LEGACY", "F27_UNBOUNDED"})
-	public LongSourceType sourceType;
+        new Runner(options).run();
+    }
 
-	@Benchmark
-	public void mapSink(FlinkEnvironmentContext context) throws Exception {
+    @Benchmark
+    public void mapSink(FlinkEnvironmentContext context) throws Exception {
 
-		StreamExecutionEnvironment env = context.env;
-		env.enableCheckpointing(CHECKPOINT_INTERVAL_MS);
-		env.setParallelism(1);
+        StreamExecutionEnvironment env = context.env;
+        env.enableCheckpointing(CHECKPOINT_INTERVAL_MS);
+        env.setParallelism(1);
 
-		DataStreamSource<Long> source = sourceType.source(env, RECORDS_PER_INVOCATION);
-		source
-				.map(new MultiplyByTwo())
-				.addSink(new DiscardingSink<>());
+        DataStreamSource<Long> source = sourceType.source(env, RECORDS_PER_INVOCATION);
+        source.map(new MultiplyByTwo()).addSink(new DiscardingSink<>());
 
-		env.execute();
-	}
+        env.execute();
+    }
 
-	@Benchmark
-	public void mapRebalanceMapSink(FlinkEnvironmentContext context) throws Exception {
+    @Benchmark
+    public void mapRebalanceMapSink(FlinkEnvironmentContext context) throws Exception {
 
-		StreamExecutionEnvironment env = context.env;
-		env.enableCheckpointing(CHECKPOINT_INTERVAL_MS);
-		env.setParallelism(1);
+        StreamExecutionEnvironment env = context.env;
+        env.enableCheckpointing(CHECKPOINT_INTERVAL_MS);
+        env.setParallelism(1);
 
-		DataStreamSource<Long> source = sourceType.source(env, RECORDS_PER_INVOCATION);
-		source
-				.map(new MultiplyByTwo())
-				.rebalance()
-				.map((Long in) -> in)
-				.addSink(new DiscardingSink<>());
+        DataStreamSource<Long> source = sourceType.source(env, RECORDS_PER_INVOCATION);
+        source.map(new MultiplyByTwo())
+                .rebalance()
+                .map((Long in) -> in)
+                .addSink(new DiscardingSink<>());
 
-		env.execute();
-	}
-}
\ No newline at end of file
+        env.execute();
+    }
+}
diff --git a/src/main/java/org/apache/flink/benchmark/KeyByBenchmarks.java b/src/main/java/org/apache/flink/benchmark/KeyByBenchmarks.java
index 13bc5fb..3fdc289 100644
--- a/src/main/java/org/apache/flink/benchmark/KeyByBenchmarks.java
+++ b/src/main/java/org/apache/flink/benchmark/KeyByBenchmarks.java
@@ -31,74 +31,72 @@ import org.openjdk.jmh.runner.options.Options;
 import org.openjdk.jmh.runner.options.OptionsBuilder;
 import org.openjdk.jmh.runner.options.VerboseMode;
 
-/**
- * Benchmark for keyBy() on tuples and arrays.
- */
+/** Benchmark for keyBy() on tuples and arrays. */
 public class KeyByBenchmarks extends BenchmarkBase {
 
-	private static final int TUPLE_RECORDS_PER_INVOCATION = 15_000_000;
-	private static final int ARRAY_RECORDS_PER_INVOCATION = 7_000_000;
-
-	public static void main(String[] args)
-			throws RunnerException {
-		Options options = new OptionsBuilder()
-				.verbosity(VerboseMode.NORMAL)
-				.include(".*" + KeyByBenchmarks.class.getCanonicalName() + ".*")
-				.build();
-
-		new Runner(options).run();
-	}
-
-	@Benchmark
-	@OperationsPerInvocation(value = KeyByBenchmarks.TUPLE_RECORDS_PER_INVOCATION)
-	public void tupleKeyBy(FlinkEnvironmentContext context) throws Exception {
-		StreamExecutionEnvironment env = context.env;
-		env.setParallelism(4);
-
-		env.addSource(new IncreasingTupleSource(TUPLE_RECORDS_PER_INVOCATION, 10))
-				.keyBy(0)
-				.addSink(new DiscardingSink<>());
-
-		env.execute();
-	}
-
-	@Benchmark
-	@OperationsPerInvocation(value = KeyByBenchmarks.ARRAY_RECORDS_PER_INVOCATION)
-	public void arrayKeyBy(FlinkEnvironmentContext context) throws Exception {
-		StreamExecutionEnvironment env = context.env;
-		env.setParallelism(4);
-
-		env.addSource(new IncreasingArraySource(ARRAY_RECORDS_PER_INVOCATION, 10))
-				.keyBy(0)
-				.addSink(new DiscardingSink<>());
-
-		env.execute();
-	}
-
-	private static class IncreasingTupleSource extends BaseSourceWithKeyRange<Tuple2<Integer, Integer>> {
-		private static final long serialVersionUID = 2941333602938145526L;
-
-		IncreasingTupleSource(int numEvents, int numKeys) {
-			super(numEvents, numKeys);
-		}
-
-		@Override
-		protected Tuple2<Integer, Integer> getElement(int keyId) {
-			return new Tuple2<>(keyId, 1);
-		}
-
-	}
-
-	private static class IncreasingArraySource extends BaseSourceWithKeyRange<int[]> {
-		private static final long serialVersionUID = -7883758559005221998L;
-
-		IncreasingArraySource(int numEvents, int numKeys) {
-			super(numEvents, numKeys);
-		}
-
-		@Override
-		protected int[] getElement(int keyId) {
-			return new int[] {keyId, 1};
-		}
-	}
+    private static final int TUPLE_RECORDS_PER_INVOCATION = 15_000_000;
+    private static final int ARRAY_RECORDS_PER_INVOCATION = 7_000_000;
+
+    public static void main(String[] args) throws RunnerException {
+        Options options =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(".*" + KeyByBenchmarks.class.getCanonicalName() + ".*")
+                        .build();
+
+        new Runner(options).run();
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(value = KeyByBenchmarks.TUPLE_RECORDS_PER_INVOCATION)
+    public void tupleKeyBy(FlinkEnvironmentContext context) throws Exception {
+        StreamExecutionEnvironment env = context.env;
+        env.setParallelism(4);
+
+        env.addSource(new IncreasingTupleSource(TUPLE_RECORDS_PER_INVOCATION, 10))
+                .keyBy(0)
+                .addSink(new DiscardingSink<>());
+
+        env.execute();
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(value = KeyByBenchmarks.ARRAY_RECORDS_PER_INVOCATION)
+    public void arrayKeyBy(FlinkEnvironmentContext context) throws Exception {
+        StreamExecutionEnvironment env = context.env;
+        env.setParallelism(4);
+
+        env.addSource(new IncreasingArraySource(ARRAY_RECORDS_PER_INVOCATION, 10))
+                .keyBy(0)
+                .addSink(new DiscardingSink<>());
+
+        env.execute();
+    }
+
+    private static class IncreasingTupleSource
+            extends BaseSourceWithKeyRange<Tuple2<Integer, Integer>> {
+        private static final long serialVersionUID = 2941333602938145526L;
+
+        IncreasingTupleSource(int numEvents, int numKeys) {
+            super(numEvents, numKeys);
+        }
+
+        @Override
+        protected Tuple2<Integer, Integer> getElement(int keyId) {
+            return new Tuple2<>(keyId, 1);
+        }
+    }
+
+    private static class IncreasingArraySource extends BaseSourceWithKeyRange<int[]> {
+        private static final long serialVersionUID = -7883758559005221998L;
+
+        IncreasingArraySource(int numEvents, int numKeys) {
+            super(numEvents, numKeys);
+        }
+
+        @Override
+        protected int[] getElement(int keyId) {
+            return new int[] {keyId, 1};
+        }
+    }
 }
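
Aside: the benchmarks above key by tuple-field index (keyBy(0)). For comparison only, a sketch of the equivalent key-selector form (names are illustrative, not part of the commit):

    import org.apache.flink.api.java.tuple.Tuple2;
    import org.apache.flink.streaming.api.datastream.DataStream;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    public class KeyBySketch {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            DataStream<Tuple2<Integer, Integer>> source =
                    env.fromElements(Tuple2.of(1, 1), Tuple2.of(2, 1), Tuple2.of(1, 1));

            // Partition by the first tuple field via a key selector rather than an index.
            source.keyBy(t -> t.f0)
                    .sum(1)
                    .print();

            env.execute("key-by-sketch");
        }
    }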
diff --git a/src/main/java/org/apache/flink/benchmark/MemoryStateBackendBenchmark.java b/src/main/java/org/apache/flink/benchmark/MemoryStateBackendBenchmark.java
index a781711..2f5943f 100644
--- a/src/main/java/org/apache/flink/benchmark/MemoryStateBackendBenchmark.java
+++ b/src/main/java/org/apache/flink/benchmark/MemoryStateBackendBenchmark.java
@@ -36,32 +36,33 @@ import static org.openjdk.jmh.annotations.Scope.Thread;
 
 @OperationsPerInvocation(value = MemoryStateBackendBenchmark.RECORDS_PER_INVOCATION)
 public class MemoryStateBackendBenchmark extends StateBackendBenchmarkBase {
-	public static final int RECORDS_PER_INVOCATION = 7_000_000;
+    public static final int RECORDS_PER_INVOCATION = 7_000_000;
 
-	public static void main(String[] args)
-			throws RunnerException {
-		Options options = new OptionsBuilder()
-				.verbosity(VerboseMode.NORMAL)
-				.include(".*" + MemoryStateBackendBenchmark.class.getCanonicalName() + ".*")
-				.build();
+    public static void main(String[] args) throws RunnerException {
+        Options options =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(".*" + MemoryStateBackendBenchmark.class.getCanonicalName() + ".*")
+                        .build();
 
-		new Runner(options).run();
-	}
+        new Runner(options).run();
+    }
 
-	@Benchmark
-	public void stateBackends(MemoryStateBackendContext context) throws Exception {
-		IntLongApplications.reduceWithWindow(context.source, TumblingEventTimeWindows.of(Time.seconds(10_000)));
-		context.execute();
-	}
+    @Benchmark
+    public void stateBackends(MemoryStateBackendContext context) throws Exception {
+        IntLongApplications.reduceWithWindow(
+                context.source, TumblingEventTimeWindows.of(Time.seconds(10_000)));
+        context.execute();
+    }
 
-	@State(Thread)
-	public static class MemoryStateBackendContext extends StateBackendContext {
-		@Param({"MEMORY", "FS", "FS_ASYNC"})
-		public StateBackend stateBackend = StateBackend.MEMORY;
+    @State(Thread)
+    public static class MemoryStateBackendContext extends StateBackendContext {
+        @Param({"MEMORY", "FS", "FS_ASYNC"})
+        public StateBackend stateBackend = StateBackend.MEMORY;
 
-		@Override
-		public void setUp() throws Exception {
-			super.setUp(stateBackend, RECORDS_PER_INVOCATION);
-		}
-	}
+        @Override
+        public void setUp() throws Exception {
+            super.setUp(stateBackend, RECORDS_PER_INVOCATION);
+        }
+    }
 }
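
A quick illustration, separate from the commit: the benchmark's IntLongApplications.reduceWithWindow helper applies a keyed reduce inside 10,000-second tumbling event-time windows. A generic, self-contained equivalent of that windowing pattern (not the repository helper itself), with an explicit watermark strategy added so this bounded example actually fires; names are illustrative:

    import org.apache.flink.api.common.eventtime.WatermarkStrategy;
    import org.apache.flink.api.java.tuple.Tuple2;
    import org.apache.flink.streaming.api.datastream.DataStream;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
    import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
    import org.apache.flink.streaming.api.windowing.time.Time;

    public class WindowReduceSketch {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            DataStream<Tuple2<Integer, Long>> source =
                    env.fromElements(Tuple2.of(1, 1L), Tuple2.of(1, 2L), Tuple2.of(2, 3L))
                            .assignTimestampsAndWatermarks(
                                    WatermarkStrategy.<Tuple2<Integer, Long>>forMonotonousTimestamps()
                                            .withTimestampAssigner((event, ts) -> event.f1));

            // Sum values per key inside 10,000-second tumbling event-time windows,
            // the same window size the state backend benchmarks use.
            source.keyBy(t -> t.f0)
                    .window(TumblingEventTimeWindows.of(Time.seconds(10_000)))
                    .reduce((a, b) -> Tuple2.of(a.f0, a.f1 + b.f1))
                    .print();

            env.execute("window-reduce-sketch");
        }
    }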
diff --git a/src/main/java/org/apache/flink/benchmark/MultipleInputBenchmark.java b/src/main/java/org/apache/flink/benchmark/MultipleInputBenchmark.java
index 53c6115..f466f5b 100644
--- a/src/main/java/org/apache/flink/benchmark/MultipleInputBenchmark.java
+++ b/src/main/java/org/apache/flink/benchmark/MultipleInputBenchmark.java
@@ -52,144 +52,147 @@ import org.openjdk.jmh.runner.options.VerboseMode;
 import java.util.concurrent.CompletableFuture;
 
 public class MultipleInputBenchmark extends BenchmarkBase {
-	public static final int RECORDS_PER_INVOCATION = TwoInputBenchmark.RECORDS_PER_INVOCATION;
-	public static final int ONE_IDLE_RECORDS_PER_INVOCATION = TwoInputBenchmark.ONE_IDLE_RECORDS_PER_INVOCATION;
-	public static final long CHECKPOINT_INTERVAL_MS = TwoInputBenchmark.CHECKPOINT_INTERVAL_MS;
-
-	public static void main(String[] args)
-			throws RunnerException {
-		Options options = new OptionsBuilder()
-				.verbosity(VerboseMode.NORMAL)
-				.include(".*" + MultipleInputBenchmark.class.getSimpleName() + ".*")
-				.build();
-
-		new Runner(options).run();
-	}
-
-	@Benchmark
-	@OperationsPerInvocation(RECORDS_PER_INVOCATION)
-	public void multiInputMapSink(FlinkEnvironmentContext context) throws Exception {
-
-		StreamExecutionEnvironment env = context.env;
-		env.enableCheckpointing(CHECKPOINT_INTERVAL_MS);
-
-		long numRecordsPerInput = RECORDS_PER_INVOCATION / 2;
-		DataStreamSource<Long> source1 = env.addSource(new LongSource(numRecordsPerInput));
-		DataStreamSource<Long> source2 = env.addSource(new LongSource(numRecordsPerInput));
-		connectAndDiscard(env, source1, source2);
-
-		env.execute();
-	}
-
-	@Benchmark
-	@OperationsPerInvocation(ONE_IDLE_RECORDS_PER_INVOCATION)
-	public void multiInputOneIdleMapSink(FlinkEnvironmentContext context) throws Exception {
-
-		StreamExecutionEnvironment env = context.env;
-		env.enableCheckpointing(CHECKPOINT_INTERVAL_MS);
-
-		QueuingLongSource.reset();
-		DataStreamSource<Long> source1 = env.addSource(new QueuingLongSource(1, ONE_IDLE_RECORDS_PER_INVOCATION - 1));
-		DataStreamSource<Long> source2 = env.addSource(new QueuingLongSource(2, 1));
-		connectAndDiscard(env, source1, source2);
-
-		env.execute();
-	}
-
-	@Benchmark
-	@OperationsPerInvocation(RECORDS_PER_INVOCATION)
-	public void multiInputChainedIdleSource(FlinkEnvironmentContext context) throws Exception {
-		final StreamExecutionEnvironment env = context.env;
-		env.getConfig().enableObjectReuse();
-
-		final DataStream<Long> source1 =
-				env.fromSource(
-						new NumberSequenceSource(1L, RECORDS_PER_INVOCATION),
-						WatermarkStrategy.noWatermarks(),
-						"source-1");
-
-		final DataStreamSource<Integer> source2 =
-				env.fromSource(new IdlingSource(1), WatermarkStrategy.noWatermarks(), "source-2");
-
-		MultipleInputTransformation<Long> transform = new MultipleInputTransformation<>(
-				"custom operator",
-				new MultiplyByTwoOperatorFactory(),
-				BasicTypeInfo.LONG_TYPE_INFO,
-				1);
-
-		transform.addInput(((DataStream<?>) source1).getTransformation());
-		transform.addInput(((DataStream<?>) source2).getTransformation());
-		transform.setChainingStrategy(ChainingStrategy.HEAD_WITH_SOURCES);
-
-		env.addOperator(transform);
-		new MultipleConnectedStreams(env).transform(transform).addSink(new SinkClosingIdlingSource()).setParallelism(1);
-		context.execute();
-	}
-
-	private static class IdlingSource extends MockSource {
-		private static CompletableFuture<Void> canFinish = new CompletableFuture<>();
-
-		public static void signalCanFinish() {
-			canFinish.complete(null);
-		}
-
-		public static void reset() {
-			canFinish.completeExceptionally(new IllegalStateException("State has been reset"));
-			canFinish = new CompletableFuture<>();
-		}
-
-		public IdlingSource(int numSplits) {
-			super(Boundedness.BOUNDED, numSplits, true, true);
-		}
-
-		@Override
-		public SourceReader<Integer, MockSourceSplit> createReader(
-				SourceReaderContext readerContext) {
-			return new MockSourceReader(true, true) {
-				@Override
-				public InputStatus pollNext(ReaderOutput<Integer> sourceOutput) {
-					if (canFinish.isDone() && !canFinish.isCompletedExceptionally()) {
-						return InputStatus.END_OF_INPUT;
-					} else {
-						return InputStatus.NOTHING_AVAILABLE;
-					}
-				}
-
-				@Override
-				public synchronized CompletableFuture<Void> isAvailable() {
-					return canFinish;
-				}
-			};
-		}
-	}
-
-	private static class SinkClosingIdlingSource implements SinkFunction<Long> {
-		private int recordsSoFar = 0;
-
-		@Override
-		public void invoke(Long value) {
-			if (++recordsSoFar >= RECORDS_PER_INVOCATION) {
-				IdlingSource.signalCanFinish();
-			}
-		}
-	}
-
-	private static void connectAndDiscard(
-			StreamExecutionEnvironment env,
-			DataStream<?> source1,
-			DataStream<?> source2) {
-		MultipleInputTransformation<Long> transform = new MultipleInputTransformation<>(
-				"custom operator",
-				new MultiplyByTwoOperatorFactory(),
-				BasicTypeInfo.LONG_TYPE_INFO,
-				1);
-
-		transform.addInput(source1.getTransformation());
-		transform.addInput(source2.getTransformation());
-
-		env.addOperator(transform);
-		new MultipleConnectedStreams(env)
-				.transform(transform)
-				.addSink(new DiscardingSink<>());
-	}
+    public static final int RECORDS_PER_INVOCATION = TwoInputBenchmark.RECORDS_PER_INVOCATION;
+    public static final int ONE_IDLE_RECORDS_PER_INVOCATION =
+            TwoInputBenchmark.ONE_IDLE_RECORDS_PER_INVOCATION;
+    public static final long CHECKPOINT_INTERVAL_MS = TwoInputBenchmark.CHECKPOINT_INTERVAL_MS;
+
+    public static void main(String[] args) throws RunnerException {
+        Options options =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(".*" + MultipleInputBenchmark.class.getSimpleName() + ".*")
+                        .build();
+
+        new Runner(options).run();
+    }
+
+    private static void connectAndDiscard(
+            StreamExecutionEnvironment env, DataStream<?> source1, DataStream<?> source2) {
+        MultipleInputTransformation<Long> transform =
+                new MultipleInputTransformation<>(
+                        "custom operator",
+                        new MultiplyByTwoOperatorFactory(),
+                        BasicTypeInfo.LONG_TYPE_INFO,
+                        1);
+
+        transform.addInput(source1.getTransformation());
+        transform.addInput(source2.getTransformation());
+
+        env.addOperator(transform);
+        new MultipleConnectedStreams(env).transform(transform).addSink(new DiscardingSink<>());
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(RECORDS_PER_INVOCATION)
+    public void multiInputMapSink(FlinkEnvironmentContext context) throws Exception {
+
+        StreamExecutionEnvironment env = context.env;
+        env.enableCheckpointing(CHECKPOINT_INTERVAL_MS);
+
+        long numRecordsPerInput = RECORDS_PER_INVOCATION / 2;
+        DataStreamSource<Long> source1 = env.addSource(new LongSource(numRecordsPerInput));
+        DataStreamSource<Long> source2 = env.addSource(new LongSource(numRecordsPerInput));
+        connectAndDiscard(env, source1, source2);
+
+        env.execute();
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(ONE_IDLE_RECORDS_PER_INVOCATION)
+    public void multiInputOneIdleMapSink(FlinkEnvironmentContext context) throws Exception {
+
+        StreamExecutionEnvironment env = context.env;
+        env.enableCheckpointing(CHECKPOINT_INTERVAL_MS);
+
+        QueuingLongSource.reset();
+        DataStreamSource<Long> source1 =
+                env.addSource(new QueuingLongSource(1, ONE_IDLE_RECORDS_PER_INVOCATION - 1));
+        DataStreamSource<Long> source2 = env.addSource(new QueuingLongSource(2, 1));
+        connectAndDiscard(env, source1, source2);
+
+        env.execute();
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(RECORDS_PER_INVOCATION)
+    public void multiInputChainedIdleSource(FlinkEnvironmentContext context) throws Exception {
+        final StreamExecutionEnvironment env = context.env;
+        env.getConfig().enableObjectReuse();
+
+        final DataStream<Long> source1 =
+                env.fromSource(
+                        new NumberSequenceSource(1L, RECORDS_PER_INVOCATION),
+                        WatermarkStrategy.noWatermarks(),
+                        "source-1");
+
+        final DataStreamSource<Integer> source2 =
+                env.fromSource(new IdlingSource(1), WatermarkStrategy.noWatermarks(), "source-2");
+
+        MultipleInputTransformation<Long> transform =
+                new MultipleInputTransformation<>(
+                        "custom operator",
+                        new MultiplyByTwoOperatorFactory(),
+                        BasicTypeInfo.LONG_TYPE_INFO,
+                        1);
+
+        transform.addInput(((DataStream<?>) source1).getTransformation());
+        transform.addInput(((DataStream<?>) source2).getTransformation());
+        transform.setChainingStrategy(ChainingStrategy.HEAD_WITH_SOURCES);
+
+        env.addOperator(transform);
+        new MultipleConnectedStreams(env)
+                .transform(transform)
+                .addSink(new SinkClosingIdlingSource())
+                .setParallelism(1);
+        context.execute();
+    }
+
+    private static class IdlingSource extends MockSource {
+        private static CompletableFuture<Void> canFinish = new CompletableFuture<>();
+
+        public IdlingSource(int numSplits) {
+            super(Boundedness.BOUNDED, numSplits, true, true);
+        }
+
+        public static void signalCanFinish() {
+            canFinish.complete(null);
+        }
+
+        public static void reset() {
+            canFinish.completeExceptionally(new IllegalStateException("State has been reset"));
+            canFinish = new CompletableFuture<>();
+        }
+
+        @Override
+        public SourceReader<Integer, MockSourceSplit> createReader(
+                SourceReaderContext readerContext) {
+            return new MockSourceReader(true, true) {
+                @Override
+                public InputStatus pollNext(ReaderOutput<Integer> sourceOutput) {
+                    if (canFinish.isDone() && !canFinish.isCompletedExceptionally()) {
+                        return InputStatus.END_OF_INPUT;
+                    } else {
+                        return InputStatus.NOTHING_AVAILABLE;
+                    }
+                }
+
+                @Override
+                public synchronized CompletableFuture<Void> isAvailable() {
+                    return canFinish;
+                }
+            };
+        }
+    }
+
+    private static class SinkClosingIdlingSource implements SinkFunction<Long> {
+        private int recordsSoFar = 0;
+
+        @Override
+        public void invoke(Long value) {
+            if (++recordsSoFar >= RECORDS_PER_INVOCATION) {
+                IdlingSource.signalCanFinish();
+            }
+        }
+    }
 }
diff --git a/src/main/java/org/apache/flink/benchmark/RemoteBenchmarkBase.java b/src/main/java/org/apache/flink/benchmark/RemoteBenchmarkBase.java
index 88047f4..9fb58c4 100644
--- a/src/main/java/org/apache/flink/benchmark/RemoteBenchmarkBase.java
+++ b/src/main/java/org/apache/flink/benchmark/RemoteBenchmarkBase.java
@@ -24,36 +24,33 @@ import org.apache.flink.runtime.minicluster.MiniClusterConfiguration;
 import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.TearDown;
 
-/**
- * Benchmark base for setting up the cluster to perform remote network shuffle.
- */
+/** Benchmark base for setting up the cluster to perform remote network shuffle. */
 public abstract class RemoteBenchmarkBase extends BenchmarkBase {
 
-	protected static final int PARALLELISM = 4;
-	protected static final int RECORDS_PER_SUBTASK = 10_000_000;
-	protected static final int RECORDS_PER_INVOCATION = RECORDS_PER_SUBTASK * PARALLELISM;
-
-	protected MiniCluster miniCluster;
-
-	@Setup
-	public void setUp() throws Exception {
-		MiniClusterConfiguration miniClusterConfiguration = new MiniClusterConfiguration.Builder()
-			.setNumTaskManagers(getNumberOfVertexes() * PARALLELISM)
-			.setNumSlotsPerTaskManager(1)
-			.build();
-		miniCluster = new MiniCluster(miniClusterConfiguration);
-		miniCluster.start();
-	}
-
-	@TearDown
-	public void tearDown() throws Exception {
-		if (miniCluster != null) {
-			miniCluster.close();
-		}
-	}
-
-	/**
-	 * @return the number of vertexes the respective job graph contains.
-	 */
-	abstract int getNumberOfVertexes();
+    protected static final int PARALLELISM = 4;
+    protected static final int RECORDS_PER_SUBTASK = 10_000_000;
+    protected static final int RECORDS_PER_INVOCATION = RECORDS_PER_SUBTASK * PARALLELISM;
+
+    protected MiniCluster miniCluster;
+
+    @Setup
+    public void setUp() throws Exception {
+        MiniClusterConfiguration miniClusterConfiguration =
+                new MiniClusterConfiguration.Builder()
+                        .setNumTaskManagers(getNumberOfVertexes() * PARALLELISM)
+                        .setNumSlotsPerTaskManager(1)
+                        .build();
+        miniCluster = new MiniCluster(miniClusterConfiguration);
+        miniCluster.start();
+    }
+
+    @TearDown
+    public void tearDown() throws Exception {
+        if (miniCluster != null) {
+            miniCluster.close();
+        }
+    }
+
+    /** @return the number of vertexes the respective job graph contains. */
+    abstract int getNumberOfVertexes();
 }
diff --git a/src/main/java/org/apache/flink/benchmark/RocksStateBackendBenchmark.java b/src/main/java/org/apache/flink/benchmark/RocksStateBackendBenchmark.java
index 912ac44..b6db6e9 100644
--- a/src/main/java/org/apache/flink/benchmark/RocksStateBackendBenchmark.java
+++ b/src/main/java/org/apache/flink/benchmark/RocksStateBackendBenchmark.java
@@ -39,40 +39,43 @@ import static org.openjdk.jmh.annotations.Scope.Thread;
 
 @OperationsPerInvocation(value = RocksStateBackendBenchmark.RECORDS_PER_INVOCATION)
 public class RocksStateBackendBenchmark extends StateBackendBenchmarkBase {
-	public static final int RECORDS_PER_INVOCATION = 2_000_000;
+    public static final int RECORDS_PER_INVOCATION = 2_000_000;
 
-	public static void main(String[] args)
-			throws RunnerException {
-		Options options = new OptionsBuilder()
-				.verbosity(VerboseMode.NORMAL)
-				.include(".*" + RocksStateBackendBenchmark.class.getCanonicalName() + ".*")
-				.build();
+    public static void main(String[] args) throws RunnerException {
+        Options options =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(".*" + RocksStateBackendBenchmark.class.getCanonicalName() + ".*")
+                        .build();
 
-		new Runner(options).run();
-	}
+        new Runner(options).run();
+    }
 
-	@Benchmark
-	public void stateBackends(RocksStateBackendContext context) throws Exception {
-		IntLongApplications.reduceWithWindow(context.source, TumblingEventTimeWindows.of(Time.seconds(10_000)));
-		context.execute();
-	}
+    @Benchmark
+    public void stateBackends(RocksStateBackendContext context) throws Exception {
+        IntLongApplications.reduceWithWindow(
+                context.source, TumblingEventTimeWindows.of(Time.seconds(10_000)));
+        context.execute();
+    }
 
-	@State(Thread)
-	public static class RocksStateBackendContext extends StateBackendContext {
-		@Param({"ROCKS", "ROCKS_INC"})
-		public StateBackend stateBackend = StateBackend.MEMORY;
+    @State(Thread)
+    public static class RocksStateBackendContext extends StateBackendContext {
+        @Param({"ROCKS", "ROCKS_INC"})
+        public StateBackend stateBackend = StateBackend.MEMORY;
 
-		@Override
-		public void setUp() throws Exception {
-			super.setUp(stateBackend, RECORDS_PER_INVOCATION);
-		}
+        @Override
+        public void setUp() throws Exception {
+            super.setUp(stateBackend, RECORDS_PER_INVOCATION);
+        }
 
-		@Override
-		protected Configuration createConfiguration() {
-			Configuration configuration = super.createConfiguration();
-			// explicit set the managed memory as 322122552 bytes, which is the default managed memory of 1GB TM with 1 slot.
-			configuration.set(RocksDBOptions.FIX_PER_SLOT_MEMORY_SIZE, MemorySize.parse("322122552b"));
-			return configuration;
-		}
-	}
+        @Override
+        protected Configuration createConfiguration() {
+            Configuration configuration = super.createConfiguration();
+            // explicitly set the managed memory to 322122552 bytes, which is the default
+            // managed memory of a 1 GB TM with one slot.
+            configuration.set(
+                    RocksDBOptions.FIX_PER_SLOT_MEMORY_SIZE, MemorySize.parse("322122552b"));
+            return configuration;
+        }
+    }
 }
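
As a usage note, a minimal sketch (not part of this commit; the import path for RocksDBOptions is assumed from the Flink version used here) of applying the same fixed per-slot RocksDB memory size to a standalone configuration. The 322122552-byte value mirrors the default managed memory of a 1 GB task manager with one slot:

import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.MemorySize;
import org.apache.flink.contrib.streaming.state.RocksDBOptions;

public class RocksMemoryConfigSketch {
    public static void main(String[] args) {
        Configuration configuration = new Configuration();
        // pin RocksDB's block cache and write buffers to a fixed size per slot
        configuration.set(
                RocksDBOptions.FIX_PER_SLOT_MEMORY_SIZE, MemorySize.parse("322122552b"));
        System.out.println(configuration.get(RocksDBOptions.FIX_PER_SLOT_MEMORY_SIZE));
    }
}
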
diff --git a/src/main/java/org/apache/flink/benchmark/SerializationFrameworkMiniBenchmarks.java b/src/main/java/org/apache/flink/benchmark/SerializationFrameworkMiniBenchmarks.java
index 8087987..37ba535 100644
--- a/src/main/java/org/apache/flink/benchmark/SerializationFrameworkMiniBenchmarks.java
+++ b/src/main/java/org/apache/flink/benchmark/SerializationFrameworkMiniBenchmarks.java
@@ -42,472 +42,500 @@ import java.util.Arrays;
 import java.util.Random;
 import java.util.concurrent.ThreadLocalRandom;
 
-/**
- * Benchmark for serializing POJOs and Tuples with different serialization frameworks.
- */
+/** Benchmark for serializing POJOs and Tuples with different serialization frameworks. */
 public class SerializationFrameworkMiniBenchmarks extends BenchmarkBase {
 
-	protected static final int RECORDS_PER_INVOCATION = 300_000;
-
-	public static void main(String[] args) throws RunnerException {
-		Options options = new OptionsBuilder()
-				.verbosity(VerboseMode.NORMAL)
-				.include(".*" + SerializationFrameworkMiniBenchmarks.class.getCanonicalName() + ".*")
-				.build();
-
-		new Runner(options).run();
-	}
-
-	@Benchmark
-	@OperationsPerInvocation(value = SerializationFrameworkMiniBenchmarks.RECORDS_PER_INVOCATION)
-	public void serializerPojo(FlinkEnvironmentContext context) throws Exception {
-		StreamExecutionEnvironment env = context.env;
-		env.setParallelism(4);
-		ExecutionConfig executionConfig = env.getConfig();
-		executionConfig.registerPojoType(MyPojo.class);
-		executionConfig.registerPojoType(MyOperation.class);
-
-		env.addSource(new PojoSource(RECORDS_PER_INVOCATION, 10))
-				.rebalance()
-				.addSink(new DiscardingSink<>());
-
-		env.execute();
-	}
-
-	@Benchmark
-	@OperationsPerInvocation(value = SerializationFrameworkMiniBenchmarks.RECORDS_PER_INVOCATION)
-	public void serializerHeavyString(FlinkEnvironmentContext context) throws Exception {
-		StreamExecutionEnvironment env = context.env;
-		env.setParallelism(1);
-		ExecutionConfig executionConfig = env.getConfig();
-		executionConfig.registerPojoType(MyPojo.class);
-		executionConfig.registerPojoType(MyOperation.class);
-
-		env.addSource(new LongStringSource(RECORDS_PER_INVOCATION, 12))
-				.rebalance()
-				.addSink(new DiscardingSink<>());
-
-		env.execute();
-	}
-
-	@Benchmark
-	@OperationsPerInvocation(value = SerializationFrameworkMiniBenchmarks.RECORDS_PER_INVOCATION)
-	public void serializerTuple(FlinkEnvironmentContext context) throws Exception {
-		StreamExecutionEnvironment env = context.env;
-		env.setParallelism(4);
-
-		env.addSource(new TupleSource(RECORDS_PER_INVOCATION, 10))
-				.rebalance()
-				.addSink(new DiscardingSink<>());
-
-		env.execute();
-	}
-
-	@Benchmark
-	@OperationsPerInvocation(value = SerializationFrameworkMiniBenchmarks.RECORDS_PER_INVOCATION)
-	public void serializerKryo(FlinkEnvironmentContext context) throws Exception {
-		StreamExecutionEnvironment env = context.env;
-		env.setParallelism(4);
-		ExecutionConfig executionConfig = env.getConfig();
-		executionConfig.enableForceKryo();
-		executionConfig.registerKryoType(MyPojo.class);
-		executionConfig.registerKryoType(MyOperation.class);
-
-		env.addSource(new PojoSource(RECORDS_PER_INVOCATION, 10))
-				.rebalance()
-				.addSink(new DiscardingSink<>());
-
-		env.execute();
-	}
-
-	@Benchmark
-	@OperationsPerInvocation(value = SerializationFrameworkMiniBenchmarks.RECORDS_PER_INVOCATION)
-	public void serializerAvro(FlinkEnvironmentContext context) throws Exception {
-		StreamExecutionEnvironment env = context.env;
-		env.setParallelism(4);
-
-		env.addSource(new AvroPojoSource(RECORDS_PER_INVOCATION, 10))
-				.rebalance()
-				.addSink(new DiscardingSink<>());
-
-		env.execute();
-	}
-
-	@Benchmark
-	@OperationsPerInvocation(value = SerializationFrameworkMiniBenchmarks.RECORDS_PER_INVOCATION)
-	public void serializerRow(FlinkEnvironmentContext context) throws Exception {
-		StreamExecutionEnvironment env = context.env;
-		env.setParallelism(4);
-
-		env.addSource(new RowSource(RECORDS_PER_INVOCATION, 10))
-				.rebalance()
-				.addSink(new DiscardingSink<>());
-
-		env.execute();
-	}
-
-	/**
-	 * Source emitting a long String.
-	 */
-	public static class LongStringSource extends BaseSourceWithKeyRange<String> {
-		private static final long serialVersionUID = 3746240885982877398L;
-		private String[] templates;
-
-		public LongStringSource(int numEvents, int numKeys) {
-			super(numEvents, numKeys);
-		}
-
-		@Override
-		protected void init() {
-			super.init();
-			templates = new String[] {
-					makeString(StringSerializationBenchmark.asciiChars, 1024),
-					makeString(StringSerializationBenchmark.russianChars, 1024),
-					makeString(StringSerializationBenchmark.chineseChars, 1024)
-			};
-		}
-
-		private String makeString(char[] symbols, int length) {
-			char[] buffer = new char[length];
-			Random random = ThreadLocalRandom.current();
-			Arrays.fill(buffer, symbols[random.nextInt(symbols.length)]);
-			return new String(buffer);
-		}
-
-		@Override
-		protected String getElement(int keyId) {
-			return templates[keyId % templates.length];
-		}
-	}
-
-	/**
-	 * Source emitting a simple {@link MyPojo POJO}.
-	 */
-	public static class PojoSource extends BaseSourceWithKeyRange<MyPojo> {
-		private static final long serialVersionUID = 2941333602938145526L;
-
-		private transient MyPojo template;
-
-		public PojoSource(int numEvents, int numKeys) {
-			super(numEvents, numKeys);
-		}
-
-		@Override
-		protected void init() {
-			super.init();
-			template = new MyPojo(
-					0,
-					"myName",
-					new String[] {"op1", "op2", "op3", "op4"},
-					new MyOperation[] {
-							new MyOperation(1, "op1"),
-							new MyOperation(2, "op2"),
-							new MyOperation(3, "op3")},
-					1,
-					2,
-					3,
-					"null");
-		}
-
-		@Override
-		protected MyPojo getElement(int keyId) {
-			template.setId(keyId);
-			return template;
-		}
-	}
-
-	/**
-	 * Source emitting a {@link org.apache.flink.benchmark.avro.MyPojo POJO} generated by an Avro schema.
-	 */
-	public static class AvroPojoSource extends BaseSourceWithKeyRange<org.apache.flink.benchmark.avro.MyPojo> {
-		private static final long serialVersionUID = 2941333602938145526L;
-
-		private transient org.apache.flink.benchmark.avro.MyPojo template;
-
-		public AvroPojoSource(int numEvents, int numKeys) {
-			super(numEvents, numKeys);
-		}
-
-		@Override
-		protected void init() {
-			super.init();
-			template = new org.apache.flink.benchmark.avro.MyPojo(
-					0,
-					"myName",
-					Arrays.asList("op1", "op2", "op3", "op4"),
-					Arrays.asList(
-							new org.apache.flink.benchmark.avro.MyOperation(1, "op1"),
-							new org.apache.flink.benchmark.avro.MyOperation(2, "op2"),
-							new org.apache.flink.benchmark.avro.MyOperation(3, "op3")),
-					1,
-					2,
-					3,
-					"null");
-		}
-
-		@Override
-		protected org.apache.flink.benchmark.avro.MyPojo getElement(int keyId) {
-			template.setId(keyId);
-			return template;
-		}
-	}
-
-	/**
-	 * Source emitting a <tt>Tuple</tt> based on {@link MyPojo}.
-	 */
-	public static class TupleSource extends BaseSourceWithKeyRange<Tuple8<Integer, String, String[], Tuple2<Integer, String>[], Integer, Integer, Integer, Object>> {
-		private static final long serialVersionUID = 2941333602938145526L;
-
-		private transient Tuple8 template;
-
-		public TupleSource(int numEvents, int numKeys) {
-			super(numEvents, numKeys);
-		}
-
-		@SuppressWarnings("unchecked")
-		@Override
-		protected void init() {
-			super.init();
-			template = MyPojo.createTuple(
-					0,
-					"myName",
-					new String[] {"op1", "op2", "op3", "op4"},
-					new Tuple2[] {
-							MyOperation.createTuple(1, "op1"),
-							MyOperation.createTuple(2, "op2"),
-							MyOperation.createTuple(3, "op3")},
-					1,
-					2,
-					3,
-					"null");
-		}
-
-		@Override
-		protected Tuple8<Integer, String, String[], Tuple2<Integer, String>[], Integer, Integer, Integer, Object> getElement(int keyId) {
-			template.setField(keyId, 0);
-			return template;
-		}
-	}
-
-	/**
-	 * Source emitting a {@link Row} based on {@link MyPojo}.
-	 */
-	public static class RowSource extends BaseSourceWithKeyRange<Row> implements ResultTypeQueryable<Row> {
-		private static final long serialVersionUID = 2941333602938145526L;
-
-		private transient Row template;
-
-		public RowSource(int numEvents, int numKeys) {
-			super(numEvents, numKeys);
-		}
-
-		@SuppressWarnings("unchecked")
-		@Override
-		protected void init() {
-			super.init();
-			template = MyPojo.createRow(
-					0,
-					"myName",
-					new String[] {"op1", "op2", "op3", "op4"},
-					new Row[] {
-							MyOperation.createRow(1, "op1"),
-							MyOperation.createRow(2, "op2"),
-							MyOperation.createRow(3, "op3")},
-					1,
-					2,
-					3,
-					"null");
-		}
-
-		@Override
-		protected Row getElement(int keyId) {
-			template.setField(0, keyId);
-			return template;
-		}
-
-		@Override
-		public TypeInformation<Row> getProducedType() {
-			return MyPojo.getProducedRowType();
-		}
-	}
-
-	/**
-	 * Not so simple POJO.
-	 */
-	@SuppressWarnings({"WeakerAccess", "unused"})
-	public static class MyPojo {
-		public int id;
-		private String name;
-		private String[] operationNames;
-		private MyOperation[] operations;
-		private int otherId1;
-		private int otherId2;
-		private int otherId3;
-		private Object someObject;
-
-		public MyPojo() {
-		}
-
-		public MyPojo(
-				int id,
-				String name,
-				String[] operationNames,
-				MyOperation[] operations,
-				int otherId1,
-				int otherId2,
-				int otherId3,
-				Object someObject) {
-			this.id = id;
-			this.name = name;
-			this.operationNames = operationNames;
-			this.operations = operations;
-			this.otherId1 = otherId1;
-			this.otherId2 = otherId2;
-			this.otherId3 = otherId3;
-			this.someObject = someObject;
-		}
-
-		public int getId() {
-			return id;
-		}
-
-		public void setId(int id) {
-			this.id = id;
-		}
-
-		public String getName() {
-			return name;
-		}
-
-		public void setName(String name) {
-			this.name = name;
-		}
-
-		public String[] getOperationNames() {
-			return operationNames;
-		}
-
-		public void setOperationNames(String[] operationNames) {
-			this.operationNames = operationNames;
-		}
-
-		public MyOperation[] getOperations() {
-			return operations;
-		}
-
-		public void setOperations(
-				MyOperation[] operations) {
-			this.operations = operations;
-		}
-
-		public int getOtherId1() {
-			return otherId1;
-		}
-
-		public void setOtherId1(int otherId1) {
-			this.otherId1 = otherId1;
-		}
-
-		public int getOtherId2() {
-			return otherId2;
-		}
-
-		public void setOtherId2(int otherId2) {
-			this.otherId2 = otherId2;
-		}
-
-		public int getOtherId3() {
-			return otherId3;
-		}
-
-		public void setOtherId3(int otherId3) {
-			this.otherId3 = otherId3;
-		}
-
-		public Object getSomeObject() {
-			return someObject;
-		}
-
-		public void setSomeObject(Object someObject) {
-			this.someObject = someObject;
-		}
-
-		public static Tuple8<Integer, String, String[], Tuple2<Integer, String>[], Integer, Integer, Integer, Object> createTuple(
-				int id,
-				String name,
-				String[] operationNames,
-				Tuple2<Integer, String>[] operations,
-				int otherId1,
-				int otherId2,
-				int otherId3,
-				Object someObject) {
-			return Tuple8.of(id, name, operationNames, operations, otherId1, otherId2, otherId3, someObject);
-		}
-
-		public static Row createRow(
-				int id,
-				String name,
-				String[] operationNames,
-				Row[] operations,
-				int otherId1,
-				int otherId2,
-				int otherId3,
-				Object someObject) {
-			return Row.of(id, name, operationNames, operations, otherId1, otherId2, otherId3, someObject);
-		}
-
-		public static TypeInformation<Row> getProducedRowType() {
-			return Types.ROW(
-					Types.INT,
-					Types.STRING,
-					Types.OBJECT_ARRAY(Types.STRING),
-					Types.OBJECT_ARRAY(Types.ROW(Types.INT, Types.STRING)),
-					Types.INT,
-					Types.INT,
-					Types.INT,
-					Types.GENERIC(Object.class)
-			);
-		}
-	}
-
-	/**
-	 * Another POJO.
-	 */
-	@SuppressWarnings({"WeakerAccess", "unused"})
-	public static class MyOperation {
-		int id;
-		protected String name;
-
-		public MyOperation() {
-		}
-
-		public MyOperation(int id, String name) {
-			this.id = id;
-			this.name = name;
-		}
-
-		public int getId() {
-			return id;
-		}
-
-		public void setId(int id) {
-			this.id = id;
-		}
-
-		public String getName() {
-			return name;
-		}
-
-		public void setName(String name) {
-			this.name = name;
-		}
-
-		public static Tuple2<Integer, String> createTuple(int id, String name) {
-			return Tuple2.of(id, name);
-		}
-
-		public static Row createRow(int id, String name) {
-			return Row.of(id, name);
-		}
-	}
+    protected static final int RECORDS_PER_INVOCATION = 300_000;
+
+    public static void main(String[] args) throws RunnerException {
+        Options options =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(
+                                ".*"
+                                        + SerializationFrameworkMiniBenchmarks.class
+                                                .getCanonicalName()
+                                        + ".*")
+                        .build();
+
+        new Runner(options).run();
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(value = SerializationFrameworkMiniBenchmarks.RECORDS_PER_INVOCATION)
+    public void serializerPojo(FlinkEnvironmentContext context) throws Exception {
+        StreamExecutionEnvironment env = context.env;
+        env.setParallelism(4);
+        ExecutionConfig executionConfig = env.getConfig();
+        executionConfig.registerPojoType(MyPojo.class);
+        executionConfig.registerPojoType(MyOperation.class);
+
+        env.addSource(new PojoSource(RECORDS_PER_INVOCATION, 10))
+                .rebalance()
+                .addSink(new DiscardingSink<>());
+
+        env.execute();
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(value = SerializationFrameworkMiniBenchmarks.RECORDS_PER_INVOCATION)
+    public void serializerHeavyString(FlinkEnvironmentContext context) throws Exception {
+        StreamExecutionEnvironment env = context.env;
+        env.setParallelism(1);
+        ExecutionConfig executionConfig = env.getConfig();
+        executionConfig.registerPojoType(MyPojo.class);
+        executionConfig.registerPojoType(MyOperation.class);
+
+        env.addSource(new LongStringSource(RECORDS_PER_INVOCATION, 12))
+                .rebalance()
+                .addSink(new DiscardingSink<>());
+
+        env.execute();
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(value = SerializationFrameworkMiniBenchmarks.RECORDS_PER_INVOCATION)
+    public void serializerTuple(FlinkEnvironmentContext context) throws Exception {
+        StreamExecutionEnvironment env = context.env;
+        env.setParallelism(4);
+
+        env.addSource(new TupleSource(RECORDS_PER_INVOCATION, 10))
+                .rebalance()
+                .addSink(new DiscardingSink<>());
+
+        env.execute();
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(value = SerializationFrameworkMiniBenchmarks.RECORDS_PER_INVOCATION)
+    public void serializerKryo(FlinkEnvironmentContext context) throws Exception {
+        StreamExecutionEnvironment env = context.env;
+        env.setParallelism(4);
+        ExecutionConfig executionConfig = env.getConfig();
+        executionConfig.enableForceKryo();
+        executionConfig.registerKryoType(MyPojo.class);
+        executionConfig.registerKryoType(MyOperation.class);
+
+        env.addSource(new PojoSource(RECORDS_PER_INVOCATION, 10))
+                .rebalance()
+                .addSink(new DiscardingSink<>());
+
+        env.execute();
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(value = SerializationFrameworkMiniBenchmarks.RECORDS_PER_INVOCATION)
+    public void serializerAvro(FlinkEnvironmentContext context) throws Exception {
+        StreamExecutionEnvironment env = context.env;
+        env.setParallelism(4);
+
+        env.addSource(new AvroPojoSource(RECORDS_PER_INVOCATION, 10))
+                .rebalance()
+                .addSink(new DiscardingSink<>());
+
+        env.execute();
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(value = SerializationFrameworkMiniBenchmarks.RECORDS_PER_INVOCATION)
+    public void serializerRow(FlinkEnvironmentContext context) throws Exception {
+        StreamExecutionEnvironment env = context.env;
+        env.setParallelism(4);
+
+        env.addSource(new RowSource(RECORDS_PER_INVOCATION, 10))
+                .rebalance()
+                .addSink(new DiscardingSink<>());
+
+        env.execute();
+    }
+
+    /** Source emitting a long String. */
+    public static class LongStringSource extends BaseSourceWithKeyRange<String> {
+        private static final long serialVersionUID = 3746240885982877398L;
+        private String[] templates;
+
+        public LongStringSource(int numEvents, int numKeys) {
+            super(numEvents, numKeys);
+        }
+
+        @Override
+        protected void init() {
+            super.init();
+            templates =
+                    new String[] {
+                        makeString(StringSerializationBenchmark.asciiChars, 1024),
+                        makeString(StringSerializationBenchmark.russianChars, 1024),
+                        makeString(StringSerializationBenchmark.chineseChars, 1024)
+                    };
+        }
+
+        private String makeString(char[] symbols, int length) {
+            char[] buffer = new char[length];
+            Random random = ThreadLocalRandom.current();
+            Arrays.fill(buffer, symbols[random.nextInt(symbols.length)]);
+            return new String(buffer);
+        }
+
+        @Override
+        protected String getElement(int keyId) {
+            return templates[keyId % templates.length];
+        }
+    }
+
+    /** Source emitting a simple {@link MyPojo POJO}. */
+    public static class PojoSource extends BaseSourceWithKeyRange<MyPojo> {
+        private static final long serialVersionUID = 2941333602938145526L;
+
+        private transient MyPojo template;
+
+        public PojoSource(int numEvents, int numKeys) {
+            super(numEvents, numKeys);
+        }
+
+        @Override
+        protected void init() {
+            super.init();
+            template =
+                    new MyPojo(
+                            0,
+                            "myName",
+                            new String[] {"op1", "op2", "op3", "op4"},
+                            new MyOperation[] {
+                                new MyOperation(1, "op1"),
+                                new MyOperation(2, "op2"),
+                                new MyOperation(3, "op3")
+                            },
+                            1,
+                            2,
+                            3,
+                            "null");
+        }
+
+        @Override
+        protected MyPojo getElement(int keyId) {
+            template.setId(keyId);
+            return template;
+        }
+    }
+
+    /**
+     * Source emitting a {@link org.apache.flink.benchmark.avro.MyPojo POJO} generated by an Avro
+     * schema.
+     */
+    public static class AvroPojoSource
+            extends BaseSourceWithKeyRange<org.apache.flink.benchmark.avro.MyPojo> {
+        private static final long serialVersionUID = 2941333602938145526L;
+
+        private transient org.apache.flink.benchmark.avro.MyPojo template;
+
+        public AvroPojoSource(int numEvents, int numKeys) {
+            super(numEvents, numKeys);
+        }
+
+        @Override
+        protected void init() {
+            super.init();
+            template =
+                    new org.apache.flink.benchmark.avro.MyPojo(
+                            0,
+                            "myName",
+                            Arrays.asList("op1", "op2", "op3", "op4"),
+                            Arrays.asList(
+                                    new org.apache.flink.benchmark.avro.MyOperation(1, "op1"),
+                                    new org.apache.flink.benchmark.avro.MyOperation(2, "op2"),
+                                    new org.apache.flink.benchmark.avro.MyOperation(3, "op3")),
+                            1,
+                            2,
+                            3,
+                            "null");
+        }
+
+        @Override
+        protected org.apache.flink.benchmark.avro.MyPojo getElement(int keyId) {
+            template.setId(keyId);
+            return template;
+        }
+    }
+
+    /** Source emitting a <tt>Tuple</tt> based on {@link MyPojo}. */
+    public static class TupleSource
+            extends BaseSourceWithKeyRange<
+                    Tuple8<
+                            Integer,
+                            String,
+                            String[],
+                            Tuple2<Integer, String>[],
+                            Integer,
+                            Integer,
+                            Integer,
+                            Object>> {
+        private static final long serialVersionUID = 2941333602938145526L;
+
+        private transient Tuple8 template;
+
+        public TupleSource(int numEvents, int numKeys) {
+            super(numEvents, numKeys);
+        }
+
+        @SuppressWarnings("unchecked")
+        @Override
+        protected void init() {
+            super.init();
+            template =
+                    MyPojo.createTuple(
+                            0,
+                            "myName",
+                            new String[] {"op1", "op2", "op3", "op4"},
+                            new Tuple2[] {
+                                MyOperation.createTuple(1, "op1"),
+                                MyOperation.createTuple(2, "op2"),
+                                MyOperation.createTuple(3, "op3")
+                            },
+                            1,
+                            2,
+                            3,
+                            "null");
+        }
+
+        @Override
+        protected Tuple8<
+                        Integer,
+                        String,
+                        String[],
+                        Tuple2<Integer, String>[],
+                        Integer,
+                        Integer,
+                        Integer,
+                        Object>
+                getElement(int keyId) {
+            template.setField(keyId, 0);
+            return template;
+        }
+    }
+
+    /** Source emitting a {@link Row} based on {@link MyPojo}. */
+    public static class RowSource extends BaseSourceWithKeyRange<Row>
+            implements ResultTypeQueryable<Row> {
+        private static final long serialVersionUID = 2941333602938145526L;
+
+        private transient Row template;
+
+        public RowSource(int numEvents, int numKeys) {
+            super(numEvents, numKeys);
+        }
+
+        @SuppressWarnings("unchecked")
+        @Override
+        protected void init() {
+            super.init();
+            template =
+                    MyPojo.createRow(
+                            0,
+                            "myName",
+                            new String[] {"op1", "op2", "op3", "op4"},
+                            new Row[] {
+                                MyOperation.createRow(1, "op1"),
+                                MyOperation.createRow(2, "op2"),
+                                MyOperation.createRow(3, "op3")
+                            },
+                            1,
+                            2,
+                            3,
+                            "null");
+        }
+
+        @Override
+        protected Row getElement(int keyId) {
+            template.setField(0, keyId);
+            return template;
+        }
+
+        @Override
+        public TypeInformation<Row> getProducedType() {
+            return MyPojo.getProducedRowType();
+        }
+    }
+
+    /** A not-so-simple POJO. */
+    @SuppressWarnings({"WeakerAccess", "unused"})
+    public static class MyPojo {
+        public int id;
+        private String name;
+        private String[] operationNames;
+        private MyOperation[] operations;
+        private int otherId1;
+        private int otherId2;
+        private int otherId3;
+        private Object someObject;
+
+        public MyPojo() {}
+
+        public MyPojo(
+                int id,
+                String name,
+                String[] operationNames,
+                MyOperation[] operations,
+                int otherId1,
+                int otherId2,
+                int otherId3,
+                Object someObject) {
+            this.id = id;
+            this.name = name;
+            this.operationNames = operationNames;
+            this.operations = operations;
+            this.otherId1 = otherId1;
+            this.otherId2 = otherId2;
+            this.otherId3 = otherId3;
+            this.someObject = someObject;
+        }
+
+        public static Tuple8<
+                        Integer,
+                        String,
+                        String[],
+                        Tuple2<Integer, String>[],
+                        Integer,
+                        Integer,
+                        Integer,
+                        Object>
+                createTuple(
+                        int id,
+                        String name,
+                        String[] operationNames,
+                        Tuple2<Integer, String>[] operations,
+                        int otherId1,
+                        int otherId2,
+                        int otherId3,
+                        Object someObject) {
+            return Tuple8.of(
+                    id, name, operationNames, operations, otherId1, otherId2, otherId3, someObject);
+        }
+
+        public static Row createRow(
+                int id,
+                String name,
+                String[] operationNames,
+                Row[] operations,
+                int otherId1,
+                int otherId2,
+                int otherId3,
+                Object someObject) {
+            return Row.of(
+                    id, name, operationNames, operations, otherId1, otherId2, otherId3, someObject);
+        }
+
+        public static TypeInformation<Row> getProducedRowType() {
+            return Types.ROW(
+                    Types.INT,
+                    Types.STRING,
+                    Types.OBJECT_ARRAY(Types.STRING),
+                    Types.OBJECT_ARRAY(Types.ROW(Types.INT, Types.STRING)),
+                    Types.INT,
+                    Types.INT,
+                    Types.INT,
+                    Types.GENERIC(Object.class));
+        }
+
+        public int getId() {
+            return id;
+        }
+
+        public void setId(int id) {
+            this.id = id;
+        }
+
+        public String getName() {
+            return name;
+        }
+
+        public void setName(String name) {
+            this.name = name;
+        }
+
+        public String[] getOperationNames() {
+            return operationNames;
+        }
+
+        public void setOperationNames(String[] operationNames) {
+            this.operationNames = operationNames;
+        }
+
+        public MyOperation[] getOperations() {
+            return operations;
+        }
+
+        public void setOperations(MyOperation[] operations) {
+            this.operations = operations;
+        }
+
+        public int getOtherId1() {
+            return otherId1;
+        }
+
+        public void setOtherId1(int otherId1) {
+            this.otherId1 = otherId1;
+        }
+
+        public int getOtherId2() {
+            return otherId2;
+        }
+
+        public void setOtherId2(int otherId2) {
+            this.otherId2 = otherId2;
+        }
+
+        public int getOtherId3() {
+            return otherId3;
+        }
+
+        public void setOtherId3(int otherId3) {
+            this.otherId3 = otherId3;
+        }
+
+        public Object getSomeObject() {
+            return someObject;
+        }
+
+        public void setSomeObject(Object someObject) {
+            this.someObject = someObject;
+        }
+    }
+
+    /** Another POJO. */
+    @SuppressWarnings({"WeakerAccess", "unused"})
+    public static class MyOperation {
+        protected String name;
+        int id;
+
+        public MyOperation() {}
+
+        public MyOperation(int id, String name) {
+            this.id = id;
+            this.name = name;
+        }
+
+        public static Tuple2<Integer, String> createTuple(int id, String name) {
+            return Tuple2.of(id, name);
+        }
+
+        public static Row createRow(int id, String name) {
+            return Row.of(id, name);
+        }
+
+        public int getId() {
+            return id;
+        }
+
+        public void setId(int id) {
+            this.id = id;
+        }
+
+        public String getName() {
+            return name;
+        }
+
+        public void setName(String name) {
+            this.name = name;
+        }
+    }
 }
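
For reference, a minimal sketch (hypothetical class name; it reuses the MyPojo and MyOperation types defined above) of the two registration modes these benchmarks compare, POJO registration versus forced Kryo:

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.benchmark.SerializationFrameworkMiniBenchmarks;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class SerializerRegistrationSketch {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        ExecutionConfig executionConfig = env.getConfig();

        // serializerPojo path: register the types so Flink's PojoSerializer handles them
        executionConfig.registerPojoType(SerializationFrameworkMiniBenchmarks.MyPojo.class);
        executionConfig.registerPojoType(SerializationFrameworkMiniBenchmarks.MyOperation.class);

        // the serializerKryo path would instead call enableForceKryo() and registerKryoType(...)
    }
}
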
diff --git a/src/main/java/org/apache/flink/benchmark/SortingBoundedInputBenchmarks.java b/src/main/java/org/apache/flink/benchmark/SortingBoundedInputBenchmarks.java
index 3a0af48..009c423 100644
--- a/src/main/java/org/apache/flink/benchmark/SortingBoundedInputBenchmarks.java
+++ b/src/main/java/org/apache/flink/benchmark/SortingBoundedInputBenchmarks.java
@@ -95,17 +95,6 @@ public class SortingBoundedInputBenchmarks extends BenchmarkBase {
         new Runner(options).run();
     }
 
-    @State(Thread)
-    public static class SortingInputContext extends FlinkEnvironmentContext {
-        @Override
-        protected Configuration createConfiguration() {
-            Configuration configuration = super.createConfiguration();
-            configuration.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH);
-            configuration.set(AlgorithmOptions.SORT_SPILLING_THRESHOLD, 0f);
-            return configuration;
-        }
-    }
-
     @Benchmark
     @OperationsPerInvocation(value = RECORDS_PER_INVOCATION)
     public void sortedOneInput(SortingInputContext context) throws Exception {
@@ -194,6 +183,17 @@ public class SortingBoundedInputBenchmarks extends BenchmarkBase {
         context.execute();
     }
 
+    @State(Thread)
+    public static class SortingInputContext extends FlinkEnvironmentContext {
+        @Override
+        protected Configuration createConfiguration() {
+            Configuration configuration = super.createConfiguration();
+            configuration.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH);
+            configuration.set(AlgorithmOptions.SORT_SPILLING_THRESHOLD, 0f);
+            return configuration;
+        }
+    }
+
     private static final class ProcessedKeysOrderAsserter implements Serializable {
         private final Set<Integer> seenKeys = new HashSet<>();
         private long seenRecords = 0;
@@ -311,14 +311,14 @@ public class SortingBoundedInputBenchmarks extends BenchmarkBase {
         }
 
         @Override
-        public void setChainingStrategy(ChainingStrategy strategy) {}
-
-        @Override
         public ChainingStrategy getChainingStrategy() {
             return ChainingStrategy.NEVER;
         }
 
         @Override
+        public void setChainingStrategy(ChainingStrategy strategy) {}
+
+        @Override
         public Class<? extends StreamOperator> getStreamOperatorClass(ClassLoader classLoader) {
             return AssertingThreeInputOperator.class;
         }
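
For illustration, a minimal sketch (not part of this commit; the option import paths are assumptions) of the batch-mode configuration that SortingInputContext installs, forcing sorted inputs to spill immediately via a zero spilling threshold:

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.configuration.AlgorithmOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ExecutionOptions;

public class SortingConfigSketch {
    public static void main(String[] args) {
        Configuration configuration = new Configuration();
        configuration.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH);
        // 0f means sorted buffers spill to disk as soon as they fill up
        configuration.set(AlgorithmOptions.SORT_SPILLING_THRESHOLD, 0f);
    }
}
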
diff --git a/src/main/java/org/apache/flink/benchmark/StateBackendBenchmarkBase.java b/src/main/java/org/apache/flink/benchmark/StateBackendBenchmarkBase.java
index 1a1e82a..f3c81d1 100644
--- a/src/main/java/org/apache/flink/benchmark/StateBackendBenchmarkBase.java
+++ b/src/main/java/org/apache/flink/benchmark/StateBackendBenchmarkBase.java
@@ -32,69 +32,70 @@ import java.io.IOException;
 import java.nio.file.Files;
 
 public class StateBackendBenchmarkBase extends BenchmarkBase {
-	public enum StateBackend {
-		MEMORY,
-		FS,
-		FS_ASYNC,
-		ROCKS,
-		ROCKS_INC
-	}
+    public enum StateBackend {
+        MEMORY,
+        FS,
+        FS_ASYNC,
+        ROCKS,
+        ROCKS_INC
+    }
 
-	public static class StateBackendContext extends FlinkEnvironmentContext {
+    public static class StateBackendContext extends FlinkEnvironmentContext {
 
-		public final File checkpointDir;
+        public final File checkpointDir;
 
-		public final int numberOfElements = 1000;
+        public final int numberOfElements = 1000;
 
-		public DataStreamSource<IntegerLongSource.Record> source;
+        public DataStreamSource<IntegerLongSource.Record> source;
 
-		public StateBackendContext() {
-			try {
-				checkpointDir = Files.createTempDirectory("bench-").toFile();
-			} catch (IOException e) {
-				throw new RuntimeException(e);
-			}
-		}
+        public StateBackendContext() {
+            try {
+                checkpointDir = Files.createTempDirectory("bench-").toFile();
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+        }
 
-		public void setUp(StateBackend stateBackend, long recordsPerInvocation) throws IOException {
-			try {
-				super.setUp();
-			} catch (Exception e) {
-				e.printStackTrace();
-			}
+        public void setUp(StateBackend stateBackend, long recordsPerInvocation) throws IOException {
+            try {
+                super.setUp();
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
 
-			final AbstractStateBackend backend;
-			String checkpointDataUri = "file://" + checkpointDir.getAbsolutePath();
-			switch (stateBackend) {
-				case MEMORY:
-					backend = new MemoryStateBackend();
-					break;
-				case FS:
-					backend = new FsStateBackend(checkpointDataUri, false);
-					break;
-				case FS_ASYNC:
-					backend = new FsStateBackend(checkpointDataUri, true);
-					break;
-				case ROCKS:
-					backend = new RocksDBStateBackend(checkpointDataUri, false);
-					break;
-				case ROCKS_INC:
-					backend = new RocksDBStateBackend(checkpointDataUri, true);
-					break;
-				default:
-					throw new UnsupportedOperationException("Unknown state backend: " + stateBackend);
-			}
+            final AbstractStateBackend backend;
+            String checkpointDataUri = "file://" + checkpointDir.getAbsolutePath();
+            switch (stateBackend) {
+                case MEMORY:
+                    backend = new MemoryStateBackend();
+                    break;
+                case FS:
+                    backend = new FsStateBackend(checkpointDataUri, false);
+                    break;
+                case FS_ASYNC:
+                    backend = new FsStateBackend(checkpointDataUri, true);
+                    break;
+                case ROCKS:
+                    backend = new RocksDBStateBackend(checkpointDataUri, false);
+                    break;
+                case ROCKS_INC:
+                    backend = new RocksDBStateBackend(checkpointDataUri, true);
+                    break;
+                default:
+                    throw new UnsupportedOperationException(
+                            "Unknown state backend: " + stateBackend);
+            }
 
-			env.setStateBackend(backend);
+            env.setStateBackend(backend);
 
-			env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
-			source = env.addSource(new IntegerLongSource(numberOfElements, recordsPerInvocation));
-		}
+            env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
+            source = env.addSource(new IntegerLongSource(numberOfElements, recordsPerInvocation));
+        }
 
-		@Override
-		public void tearDown() throws Exception {
-			super.tearDown();
-			FileUtils.deleteDirectory(checkpointDir);
-		}
-	}
+        @Override
+        public void tearDown() throws Exception {
+            super.tearDown();
+            FileUtils.deleteDirectory(checkpointDir);
+        }
+    }
 }
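
A minimal sketch (not part of this commit; the temporary directory and import path are illustrative) of the FS_ASYNC branch of the switch above, wiring an asynchronous FsStateBackend into an environment:

import java.nio.file.Files;

import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class FsAsyncBackendSketch {
    public static void main(String[] args) throws Exception {
        String checkpointDataUri =
                "file://" + Files.createTempDirectory("bench-").toFile().getAbsolutePath();
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // the second argument enables asynchronous snapshots, matching the FS_ASYNC case
        env.setStateBackend(new FsStateBackend(checkpointDataUri, true));
    }
}
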
diff --git a/src/main/java/org/apache/flink/benchmark/StreamGraphUtils.java b/src/main/java/org/apache/flink/benchmark/StreamGraphUtils.java
index 0746193..d083843 100644
--- a/src/main/java/org/apache/flink/benchmark/StreamGraphUtils.java
+++ b/src/main/java/org/apache/flink/benchmark/StreamGraphUtils.java
@@ -26,20 +26,19 @@ import org.apache.flink.streaming.api.functions.sink.DiscardingSink;
 import org.apache.flink.streaming.api.graph.GlobalStreamExchangeMode;
 import org.apache.flink.streaming.api.graph.StreamGraph;
 
-/**
- * Utilities for building respective graph for performing in benchmark.
- */
+/** Utilities for building the stream graphs used by the benchmarks. */
 public class StreamGraphUtils {
 
-	public static StreamGraph buildGraphForBatchJob(StreamExecutionEnvironment env, int numRecords) {
-		DataStreamSource<Long> source = env.addSource(new LongSource(numRecords));
-		source.addSink(new DiscardingSink<>());
+    public static StreamGraph buildGraphForBatchJob(
+            StreamExecutionEnvironment env, int numRecords) {
+        DataStreamSource<Long> source = env.addSource(new LongSource(numRecords));
+        source.addSink(new DiscardingSink<>());
 
-		StreamGraph streamGraph = env.getStreamGraph();
-		streamGraph.setChaining(false);
-		streamGraph.setGlobalStreamExchangeMode(GlobalStreamExchangeMode.ALL_EDGES_BLOCKING);
-		streamGraph.setJobType(JobType.BATCH);
+        StreamGraph streamGraph = env.getStreamGraph();
+        streamGraph.setChaining(false);
+        streamGraph.setGlobalStreamExchangeMode(GlobalStreamExchangeMode.ALL_EDGES_BLOCKING);
+        streamGraph.setJobType(JobType.BATCH);
 
-		return streamGraph;
-	}
+        return streamGraph;
+    }
 }
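
A minimal usage sketch (not part of this commit; the translation to a JobGraph is an assumption about how callers submit the graph):

import org.apache.flink.benchmark.StreamGraphUtils;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.graph.StreamGraph;

public class BatchGraphUsageSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamGraph streamGraph = StreamGraphUtils.buildGraphForBatchJob(env, 1_000);
        // the stream graph translates to a JobGraph that e.g. a MiniCluster can execute
        JobGraph jobGraph = streamGraph.getJobGraph();
    }
}
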
diff --git a/src/main/java/org/apache/flink/benchmark/TwoInputBenchmark.java b/src/main/java/org/apache/flink/benchmark/TwoInputBenchmark.java
index fe39549..b0f1144 100644
--- a/src/main/java/org/apache/flink/benchmark/TwoInputBenchmark.java
+++ b/src/main/java/org/apache/flink/benchmark/TwoInputBenchmark.java
@@ -36,63 +36,68 @@ import org.openjdk.jmh.runner.options.VerboseMode;
 
 public class TwoInputBenchmark extends BenchmarkBase {
 
-	public static final int RECORDS_PER_INVOCATION = 25_000_000;
-	public static final int ONE_IDLE_RECORDS_PER_INVOCATION = 15_000_000;
-	public static final long CHECKPOINT_INTERVAL_MS = 100;
-
-	public static void main(String[] args)
-		throws RunnerException {
-		Options options = new OptionsBuilder()
-			.verbosity(VerboseMode.NORMAL)
-			.include(".*" + TwoInputBenchmark.class.getCanonicalName() + ".*")
-			.build();
-
-		new Runner(options).run();
-	}
-
-	@Benchmark
-	@OperationsPerInvocation(value = TwoInputBenchmark.RECORDS_PER_INVOCATION)
-	public void twoInputMapSink(FlinkEnvironmentContext context) throws Exception {
-
-		StreamExecutionEnvironment env = context.env;
-
-		env.enableCheckpointing(CHECKPOINT_INTERVAL_MS);
-		env.setParallelism(1);
-
-		// Setting buffer timeout to 1 is an attempt to improve twoInputMapSink benchmark stability.
-		// Without 1ms buffer timeout, some JVM forks are much slower then others, making results
-		// unstable and unreliable.
-		env.setBufferTimeout(1);
-
-		long numRecordsPerInput = RECORDS_PER_INVOCATION / 2;
-		DataStreamSource<Long> source1 = env.addSource(new LongSource(numRecordsPerInput));
-		DataStreamSource<Long> source2 = env.addSource(new LongSource(numRecordsPerInput));
-
-		source1
-			.connect(source2)
-			.transform("custom operator", TypeInformation.of(Long.class), new MultiplyByTwoCoStreamMap())
-			.addSink(new DiscardingSink<>());
-
-		env.execute();
-	}
-
-	@Benchmark
-	@OperationsPerInvocation(value = TwoInputBenchmark.ONE_IDLE_RECORDS_PER_INVOCATION)
-	public void twoInputOneIdleMapSink(FlinkEnvironmentContext context) throws Exception {
-
-		StreamExecutionEnvironment env = context.env;
-		env.enableCheckpointing(CHECKPOINT_INTERVAL_MS);
-		env.setParallelism(1);
-
-		QueuingLongSource.reset();
-		DataStreamSource<Long> source1 = env.addSource(new QueuingLongSource(1, ONE_IDLE_RECORDS_PER_INVOCATION - 1));
-		DataStreamSource<Long> source2 = env.addSource(new QueuingLongSource(2, 1));
-
-		source1
-				.connect(source2)
-				.transform("custom operator", TypeInformation.of(Long.class), new MultiplyByTwoCoStreamMap())
-				.addSink(new DiscardingSink<>());
-
-		env.execute();
-	}
+    public static final int RECORDS_PER_INVOCATION = 25_000_000;
+    public static final int ONE_IDLE_RECORDS_PER_INVOCATION = 15_000_000;
+    public static final long CHECKPOINT_INTERVAL_MS = 100;
+
+    public static void main(String[] args) throws RunnerException {
+        Options options =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(".*" + TwoInputBenchmark.class.getCanonicalName() + ".*")
+                        .build();
+
+        new Runner(options).run();
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(value = TwoInputBenchmark.RECORDS_PER_INVOCATION)
+    public void twoInputMapSink(FlinkEnvironmentContext context) throws Exception {
+
+        StreamExecutionEnvironment env = context.env;
+
+        env.enableCheckpointing(CHECKPOINT_INTERVAL_MS);
+        env.setParallelism(1);
+
+        // Setting the buffer timeout to 1 ms is an attempt to improve twoInputMapSink benchmark
+        // stability. Without it, some JVM forks are much slower than others, making results
+        // unstable and unreliable.
+        env.setBufferTimeout(1);
+
+        long numRecordsPerInput = RECORDS_PER_INVOCATION / 2;
+        DataStreamSource<Long> source1 = env.addSource(new LongSource(numRecordsPerInput));
+        DataStreamSource<Long> source2 = env.addSource(new LongSource(numRecordsPerInput));
+
+        source1.connect(source2)
+                .transform(
+                        "custom operator",
+                        TypeInformation.of(Long.class),
+                        new MultiplyByTwoCoStreamMap())
+                .addSink(new DiscardingSink<>());
+
+        env.execute();
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(value = TwoInputBenchmark.ONE_IDLE_RECORDS_PER_INVOCATION)
+    public void twoInputOneIdleMapSink(FlinkEnvironmentContext context) throws Exception {
+
+        StreamExecutionEnvironment env = context.env;
+        env.enableCheckpointing(CHECKPOINT_INTERVAL_MS);
+        env.setParallelism(1);
+
+        QueuingLongSource.reset();
+        DataStreamSource<Long> source1 =
+                env.addSource(new QueuingLongSource(1, ONE_IDLE_RECORDS_PER_INVOCATION - 1));
+        DataStreamSource<Long> source2 = env.addSource(new QueuingLongSource(2, 1));
+
+        source1.connect(source2)
+                .transform(
+                        "custom operator",
+                        TypeInformation.of(Long.class),
+                        new MultiplyByTwoCoStreamMap())
+                .addSink(new DiscardingSink<>());
+
+        env.execute();
+    }
 }
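
A minimal sketch (not part of this commit) isolating the stabilization settings that twoInputMapSink applies before wiring its sources and the custom operator:

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class StabilizationSettingsSketch {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(100); // CHECKPOINT_INTERVAL_MS
        env.setParallelism(1);
        // flush network buffers at most every 1 ms to even out fork-to-fork variance
        env.setBufferTimeout(1);
    }
}
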
diff --git a/src/main/java/org/apache/flink/benchmark/WindowBenchmarks.java b/src/main/java/org/apache/flink/benchmark/WindowBenchmarks.java
index 9ee542e..78b87ec 100644
--- a/src/main/java/org/apache/flink/benchmark/WindowBenchmarks.java
+++ b/src/main/java/org/apache/flink/benchmark/WindowBenchmarks.java
@@ -39,53 +39,57 @@ import org.openjdk.jmh.runner.options.VerboseMode;
 @OperationsPerInvocation(value = WindowBenchmarks.RECORDS_PER_INVOCATION)
 public class WindowBenchmarks extends BenchmarkBase {
 
-	public static final int RECORDS_PER_INVOCATION = 7_000_000;
+    public static final int RECORDS_PER_INVOCATION = 7_000_000;
 
-	public static void main(String[] args)
-			throws RunnerException {
-		Options options = new OptionsBuilder()
-				.verbosity(VerboseMode.NORMAL)
-				.include(".*" + WindowBenchmarks.class.getCanonicalName() + ".*")
-				.build();
+    public static void main(String[] args) throws RunnerException {
+        Options options =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(".*" + WindowBenchmarks.class.getCanonicalName() + ".*")
+                        .build();
 
-		new Runner(options).run();
-	}
+        new Runner(options).run();
+    }
 
-	@Benchmark
-	public void globalWindow(TimeWindowContext context) throws Exception {
-		IntLongApplications.reduceWithWindow(context.source, GlobalWindows.create());
-		context.execute();
-	}
+    @Benchmark
+    public void globalWindow(TimeWindowContext context) throws Exception {
+        IntLongApplications.reduceWithWindow(context.source, GlobalWindows.create());
+        context.execute();
+    }
 
-	@Benchmark
-	public void tumblingWindow(TimeWindowContext context) throws Exception {
-		IntLongApplications.reduceWithWindow(context.source, TumblingEventTimeWindows.of(Time.seconds(10_000)));
-		context.execute();
-	}
+    @Benchmark
+    public void tumblingWindow(TimeWindowContext context) throws Exception {
+        IntLongApplications.reduceWithWindow(
+                context.source, TumblingEventTimeWindows.of(Time.seconds(10_000)));
+        context.execute();
+    }
 
-	@Benchmark
-	public void slidingWindow(TimeWindowContext context) throws Exception {
-		IntLongApplications.reduceWithWindow(context.source, SlidingEventTimeWindows.of(Time.seconds(10_000), Time.seconds(1000)));
-		context.execute();
-	}
+    @Benchmark
+    public void slidingWindow(TimeWindowContext context) throws Exception {
+        IntLongApplications.reduceWithWindow(
+                context.source,
+                SlidingEventTimeWindows.of(Time.seconds(10_000), Time.seconds(1000)));
+        context.execute();
+    }
 
-	@Benchmark
-	public void sessionWindow(TimeWindowContext context) throws Exception {
-		IntLongApplications.reduceWithWindow(context.source, EventTimeSessionWindows.withGap(Time.seconds(500)));
-		context.execute();
-	}
+    @Benchmark
+    public void sessionWindow(TimeWindowContext context) throws Exception {
+        IntLongApplications.reduceWithWindow(
+                context.source, EventTimeSessionWindows.withGap(Time.seconds(500)));
+        context.execute();
+    }
 
-	public static class TimeWindowContext extends FlinkEnvironmentContext {
-		public final int numberOfElements = 1000;
+    public static class TimeWindowContext extends FlinkEnvironmentContext {
+        public final int numberOfElements = 1000;
 
-		public DataStreamSource<IntegerLongSource.Record> source;
+        public DataStreamSource<IntegerLongSource.Record> source;
 
-		@Override
-		public void setUp() throws Exception {
-			super.setUp();
+        @Override
+        public void setUp() throws Exception {
+            super.setUp();
 
-			env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
-			source = env.addSource(new IntegerLongSource(numberOfElements, RECORDS_PER_INVOCATION));
-		}
-	}
+            env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
+            source = env.addSource(new IntegerLongSource(numberOfElements, RECORDS_PER_INVOCATION));
+        }
+    }
 }
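
For reference, a minimal sketch (not part of this commit) collecting the four window assigners benchmarked above; the actual reduce pipeline lives in IntLongApplications.reduceWithWindow:

import org.apache.flink.streaming.api.windowing.assigners.EventTimeSessionWindows;
import org.apache.flink.streaming.api.windowing.assigners.GlobalWindows;
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;

public class WindowAssignerSketch {
    public static void main(String[] args) {
        GlobalWindows.create(); // globalWindow
        TumblingEventTimeWindows.of(Time.seconds(10_000)); // tumblingWindow
        SlidingEventTimeWindows.of(Time.seconds(10_000), Time.seconds(1000)); // slidingWindow
        EventTimeSessionWindows.withGap(Time.seconds(500)); // sessionWindow
    }
}
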
diff --git a/src/main/java/org/apache/flink/benchmark/full/PojoSerializationBenchmark.java b/src/main/java/org/apache/flink/benchmark/full/PojoSerializationBenchmark.java
index 37fc508..1c98e9f 100644
--- a/src/main/java/org/apache/flink/benchmark/full/PojoSerializationBenchmark.java
+++ b/src/main/java/org/apache/flink/benchmark/full/PojoSerializationBenchmark.java
@@ -58,7 +58,8 @@ public class PojoSerializationBenchmark extends BenchmarkBase {
 
     ExecutionConfig config = new ExecutionConfig();
     TypeSerializer<SerializationFrameworkMiniBenchmarks.MyPojo> pojoSerializer =
-            TypeInformation.of(SerializationFrameworkMiniBenchmarks.MyPojo.class).createSerializer(config);
+            TypeInformation.of(SerializationFrameworkMiniBenchmarks.MyPojo.class)
+                    .createSerializer(config);
     TypeSerializer<SerializationFrameworkMiniBenchmarks.MyPojo> kryoSerializer =
             new KryoSerializer<>(SerializationFrameworkMiniBenchmarks.MyPojo.class, config);
     TypeSerializer<org.apache.flink.benchmark.avro.MyPojo> avroSerializer =
@@ -68,48 +69,50 @@ public class PojoSerializationBenchmark extends BenchmarkBase {
     ByteArrayInputStream avroBuffer;
     ByteArrayInputStream kryoBuffer;
 
+    public static void main(String[] args) throws RunnerException {
+        Options options =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(".*" + PojoSerializationBenchmark.class.getCanonicalName() + ".*")
+                        .build();
+
+        new Runner(options).run();
+    }
 
     @Setup
     public void setup() throws IOException {
-        pojo = new SerializationFrameworkMiniBenchmarks.MyPojo(
-                0,
-                "myName",
-                new String[] {"op1", "op2", "op3", "op4"},
-                new SerializationFrameworkMiniBenchmarks.MyOperation[] {
-                        new SerializationFrameworkMiniBenchmarks.MyOperation(1, "op1"),
-                        new SerializationFrameworkMiniBenchmarks.MyOperation(2, "op2"),
-                        new SerializationFrameworkMiniBenchmarks.MyOperation(3, "op3")},
-                1,
-                2,
-                3,
-                "null");
-        avroPojo = new org.apache.flink.benchmark.avro.MyPojo(
-                0,
-                "myName",
-                Arrays.asList("op1", "op2", "op3", "op4"),
-                Arrays.asList(
-                        new org.apache.flink.benchmark.avro.MyOperation(1, "op1"),
-                        new org.apache.flink.benchmark.avro.MyOperation(2, "op2"),
-                        new org.apache.flink.benchmark.avro.MyOperation(3, "op3")),
-                1,
-                2,
-                3,
-                "null");
+        pojo =
+                new SerializationFrameworkMiniBenchmarks.MyPojo(
+                        0,
+                        "myName",
+                        new String[] {"op1", "op2", "op3", "op4"},
+                        new SerializationFrameworkMiniBenchmarks.MyOperation[] {
+                            new SerializationFrameworkMiniBenchmarks.MyOperation(1, "op1"),
+                            new SerializationFrameworkMiniBenchmarks.MyOperation(2, "op2"),
+                            new SerializationFrameworkMiniBenchmarks.MyOperation(3, "op3")
+                        },
+                        1,
+                        2,
+                        3,
+                        "null");
+        avroPojo =
+                new org.apache.flink.benchmark.avro.MyPojo(
+                        0,
+                        "myName",
+                        Arrays.asList("op1", "op2", "op3", "op4"),
+                        Arrays.asList(
+                                new org.apache.flink.benchmark.avro.MyOperation(1, "op1"),
+                                new org.apache.flink.benchmark.avro.MyOperation(2, "op2"),
+                                new org.apache.flink.benchmark.avro.MyOperation(3, "op3")),
+                        1,
+                        2,
+                        3,
+                        "null");
         pojoBuffer = new ByteArrayInputStream(write(pojoSerializer, pojo));
         avroBuffer = new ByteArrayInputStream(write(avroSerializer, avroPojo));
         kryoBuffer = new ByteArrayInputStream(write(kryoSerializer, pojo));
     }
 
-    public static void main(String[] args)
-            throws RunnerException {
-        Options options = new OptionsBuilder()
-                .verbosity(VerboseMode.NORMAL)
-                .include(".*" + PojoSerializationBenchmark.class.getCanonicalName() + ".*")
-                .build();
-
-        new Runner(options).run();
-    }
-
     @Benchmark
     public byte[] writePojo() throws IOException {
         return write(pojoSerializer, pojo);
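
For context, a minimal sketch of what a write(...) helper like the one called above could look like; its exact shape is not shown in this hunk, so this is an assumption built on Flink's TypeSerializer API:

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;

public class SerializerWriteSketch {
    static <T> byte[] write(TypeSerializer<T> serializer, T record) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        // a TypeSerializer writes records into a DataOutputView
        serializer.serialize(record, new DataOutputViewStreamWrapper(buffer));
        return buffer.toByteArray();
    }
}
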
diff --git a/src/main/java/org/apache/flink/benchmark/full/SerializationFrameworkAllBenchmarks.java b/src/main/java/org/apache/flink/benchmark/full/SerializationFrameworkAllBenchmarks.java
index d9efbb7..00d40c5 100644
--- a/src/main/java/org/apache/flink/benchmark/full/SerializationFrameworkAllBenchmarks.java
+++ b/src/main/java/org/apache/flink/benchmark/full/SerializationFrameworkAllBenchmarks.java
@@ -47,272 +47,294 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.util.Arrays;
 
-/**
- * Benchmark for serializing POJOs and Tuples with different serialization frameworks.
- */
+/** Benchmark for serializing POJOs and Tuples with different serialization frameworks. */
 public class SerializationFrameworkAllBenchmarks extends SerializationFrameworkMiniBenchmarks {
 
-	public static void main(String[] args) throws RunnerException {
-		Options options = new OptionsBuilder()
-				.verbosity(VerboseMode.NORMAL)
-				.include(".*" + SerializationFrameworkAllBenchmarks.class.getCanonicalName() + ".*")
-				.build();
-
-		new Runner(options).run();
-	}
-
-	@Benchmark
-	@OperationsPerInvocation(value = RECORDS_PER_INVOCATION)
-	public void serializerPojoWithoutRegistration(FlinkEnvironmentContext context) throws Exception {
-		StreamExecutionEnvironment env = context.env;
-		env.setParallelism(4);
-
-		env.addSource(new PojoSource(RECORDS_PER_INVOCATION, 10))
-				.rebalance()
-				.addSink(new DiscardingSink<>());
-
-		env.execute();
-	}
-
-	@Benchmark
-	@OperationsPerInvocation(value = RECORDS_PER_INVOCATION)
-	public void serializerKryoWithoutRegistration(FlinkEnvironmentContext context) throws Exception {
-		StreamExecutionEnvironment env = context.env;
-		env.setParallelism(4);
-		env.getConfig().enableForceKryo();
-
-		env.addSource(new PojoSource(RECORDS_PER_INVOCATION, 10))
-				.rebalance()
-				.addSink(new DiscardingSink<>());
-
-		env.execute();
-	}
-
-	@Benchmark
-	@OperationsPerInvocation(value = RECORDS_PER_INVOCATION)
-	public void serializerAvroReflect(FlinkEnvironmentContext context) throws Exception {
-		StreamExecutionEnvironment env = context.env;
-		env.setParallelism(4);
-		env.getConfig().enableForceAvro();
-
-		env.addSource(new PojoSource(RECORDS_PER_INVOCATION, 10))
-				.rebalance()
-				.addSink(new DiscardingSink<>());
-
-		env.execute();
-	}
-
-	@Benchmark
-	@OperationsPerInvocation(value = RECORDS_PER_INVOCATION)
-	public void serializerAvroGeneric(FlinkEnvironmentContext context) throws Exception {
-		StreamExecutionEnvironment env = context.env;
-		env.setParallelism(4);
-
-		Schema schema = AvroGenericRecordSource.loadSchema();
-		env.addSource(new AvroGenericRecordSource(RECORDS_PER_INVOCATION, 10, schema))
-				.rebalance()
-				.addSink(new DiscardingSink<>());
-
-		env.execute();
-	}
-
-	@Benchmark
-	@OperationsPerInvocation(value = SerializationFrameworkMiniBenchmarks.RECORDS_PER_INVOCATION)
-	public void serializerScalaADT(FlinkEnvironmentContext context) throws Exception {
-		StreamExecutionEnvironment env = context.env;
-		env.setParallelism(4);
-
-		env.addSource(new ScalaADTSource(RECORDS_PER_INVOCATION), ScalaADTSource.adtTypeInfo())
-				.rebalance()
-				.addSink(new DiscardingSink<>());
-
-		env.execute();
-	}
-
-
-	@Benchmark
-	@OperationsPerInvocation(value = SerializationFrameworkMiniBenchmarks.RECORDS_PER_INVOCATION)
-	public void serializerKryoThrift(FlinkEnvironmentContext context) throws Exception {
-		StreamExecutionEnvironment env = context.env;
-		env.setParallelism(4);
-		ExecutionConfig executionConfig = env.getConfig();
-		executionConfig.enableForceKryo();
-		executionConfig.addDefaultKryoSerializer(org.apache.flink.benchmark.thrift.MyPojo.class, TBaseSerializer.class);
-		executionConfig.addDefaultKryoSerializer(org.apache.flink.benchmark.thrift.MyOperation.class, TBaseSerializer.class);
-
-		env.addSource(new ThriftPojoSource(RECORDS_PER_INVOCATION, 10))
-				.rebalance()
-				.addSink(new DiscardingSink<>());
-
-		env.execute();
-	}
-
-	@Benchmark
-	@OperationsPerInvocation(value = SerializationFrameworkMiniBenchmarks.RECORDS_PER_INVOCATION)
-	public void serializerKryoProtobuf(FlinkEnvironmentContext context) throws Exception {
-		StreamExecutionEnvironment env = context.env;
-		env.setParallelism(4);
-		ExecutionConfig executionConfig = env.getConfig();
-		executionConfig.enableForceKryo();
-		executionConfig.registerTypeWithKryoSerializer(org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyPojo.class, ProtobufSerializer.class);
-		executionConfig.registerTypeWithKryoSerializer(org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyOperation.class, ProtobufSerializer.class);
-
-		env.addSource(new ProtobufPojoSource(RECORDS_PER_INVOCATION, 10))
-				.rebalance()
-				.addSink(new DiscardingSink<>());
-
-		env.execute();
-	}
-
-	/**
-	 * Source emitting an Avro GenericRecord.
-	 */
-	public static class AvroGenericRecordSource extends BaseSourceWithKeyRange<GenericRecord> implements
-			ResultTypeQueryable<GenericRecord> {
-		private static final long serialVersionUID = 2941333602938145526L;
-
-		private final GenericRecordAvroTypeInfo producedType;
-		private transient Schema myPojoSchema;
-		private final String schemaString;
-
-		private transient GenericRecord template;
-
-		public AvroGenericRecordSource(int numEvents, int numKeys, Schema schema) {
-			super(numEvents, numKeys);
-			this.producedType = new GenericRecordAvroTypeInfo(schema);
-			this.myPojoSchema = schema;
-			this.schemaString = schema.toString();
-		}
-
-		private static Schema loadSchema() throws IOException {
-			ClassLoader classLoader = ClassLoader.getSystemClassLoader();
-			try (InputStream is = classLoader.getResourceAsStream("avro/mypojo.avsc")) {
-				if (is == null) {
-					throw new FileNotFoundException("File 'mypojo.avsc' not found");
-				}
-				return new Schema.Parser().parse(is);
-			}
-		}
-
-		@Override
-		protected void init() {
-			super.init();
-
-			if (myPojoSchema == null) {
-				this.myPojoSchema = new Schema.Parser().parse(schemaString);
-			}
-			Schema myOperationSchema = myPojoSchema.getField("operations").schema().getElementType();
-
-			template = new GenericData.Record(myPojoSchema);
-			template.put("id", 0);
-			template.put("name", "myName");
-			template.put("operationName", Arrays.asList("op1", "op2", "op3", "op4"));
-
-			GenericData.Record op1 = new GenericData.Record(myOperationSchema);
-			op1.put("id", 1);
-			op1.put("name", "op1");
-			GenericData.Record op2 = new GenericData.Record(myOperationSchema);
-			op2.put("id", 2);
-			op2.put("name", "op2");
-			GenericData.Record op3 = new GenericData.Record(myOperationSchema);
-			op3.put("id", 3);
-			op3.put("name", "op3");
-			template.put("operations", Arrays.asList(op1, op2, op3));
-
-			template.put("otherId1", 1);
-			template.put("otherId2", 2);
-			template.put("otherId3", 3);
-			template.put("nullable", "null");
-		}
-
-		@Override
-		protected GenericRecord getElement(int keyId) {
-			template.put("id", keyId);
-			return template;
-		}
-
-		@Override
-		public TypeInformation<GenericRecord> getProducedType() {
-			return producedType;
-		}
-	}
-
-	/**
-	 * Source emitting a {@link org.apache.flink.benchmark.thrift.MyPojo POJO} generated by an Apache Thrift schema.
-	 */
-	public static class ThriftPojoSource extends BaseSourceWithKeyRange<org.apache.flink.benchmark.thrift.MyPojo> {
-		private static final long serialVersionUID = 2941333602938145526L;
-
-		private transient org.apache.flink.benchmark.thrift.MyPojo template;
-
-		public ThriftPojoSource(int numEvents, int numKeys) {
-			super(numEvents, numKeys);
-		}
-
-		@Override
-		protected void init() {
-			super.init();
-			template = new org.apache.flink.benchmark.thrift.MyPojo(
-					0,
-					"myName",
-					Arrays.asList("op1", "op2", "op3", "op4"),
-					Arrays.asList(
-							new org.apache.flink.benchmark.thrift.MyOperation(1, "op1"),
-							new org.apache.flink.benchmark.thrift.MyOperation(2, "op2"),
-							new org.apache.flink.benchmark.thrift.MyOperation(3, "op3")),
-					1,
-					2,
-					3);
-			template.setSomeObject("null");
-		}
-
-		@Override
-		protected org.apache.flink.benchmark.thrift.MyPojo getElement(int keyId) {
-			template.setId(keyId);
-			return template;
-		}
-	}
-
-	/**
-	 * Source emitting a {@link org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyPojo POJO} generated by a Protobuf schema.
-	 */
-	public static class ProtobufPojoSource extends BaseSourceWithKeyRange<org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyPojo> {
-		private static final long serialVersionUID = 2941333602938145526L;
-
-			private transient org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyPojo template;
-
-		public ProtobufPojoSource(int numEvents, int numKeys) {
-			super(numEvents, numKeys);
-		}
-
-		@Override
-		protected void init() {
-			super.init();
-			template = org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyPojo.newBuilder()
-					.setId(0)
-					.setName("myName")
-					.addAllOperationName(Arrays.asList("op1", "op2", "op3", "op4"))
-					.addOperations(org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyOperation.newBuilder()
-							.setId(1)
-							.setName("op1"))
-					.addOperations(org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyOperation.newBuilder()
-							.setId(2)
-							.setName("op2"))
-					.addOperations(org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyOperation.newBuilder()
-							.setId(3)
-							.setName("op3"))
-					.setOtherId1(1)
-					.setOtherId2(2)
-					.setOtherId3(3)
-					.setSomeObject("null")
-					.build();
-		}
-
-		@Override
-		protected org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyPojo getElement(int keyId) {
-			return org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyPojo.newBuilder(template)
-					.setId(keyId)
-					.build();
-		}
-	}
+    public static void main(String[] args) throws RunnerException {
+        Options options =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(
+                                ".*"
+                                        + SerializationFrameworkAllBenchmarks.class
+                                                .getCanonicalName()
+                                        + ".*")
+                        .build();
+
+        new Runner(options).run();
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(value = RECORDS_PER_INVOCATION)
+    public void serializerPojoWithoutRegistration(FlinkEnvironmentContext context)
+            throws Exception {
+        StreamExecutionEnvironment env = context.env;
+        env.setParallelism(4);
+
+        env.addSource(new PojoSource(RECORDS_PER_INVOCATION, 10))
+                .rebalance()
+                .addSink(new DiscardingSink<>());
+
+        env.execute();
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(value = RECORDS_PER_INVOCATION)
+    public void serializerKryoWithoutRegistration(FlinkEnvironmentContext context)
+            throws Exception {
+        StreamExecutionEnvironment env = context.env;
+        env.setParallelism(4);
+        env.getConfig().enableForceKryo();
+
+        env.addSource(new PojoSource(RECORDS_PER_INVOCATION, 10))
+                .rebalance()
+                .addSink(new DiscardingSink<>());
+
+        env.execute();
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(value = RECORDS_PER_INVOCATION)
+    public void serializerAvroReflect(FlinkEnvironmentContext context) throws Exception {
+        StreamExecutionEnvironment env = context.env;
+        env.setParallelism(4);
+        env.getConfig().enableForceAvro();
+
+        env.addSource(new PojoSource(RECORDS_PER_INVOCATION, 10))
+                .rebalance()
+                .addSink(new DiscardingSink<>());
+
+        env.execute();
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(value = RECORDS_PER_INVOCATION)
+    public void serializerAvroGeneric(FlinkEnvironmentContext context) throws Exception {
+        StreamExecutionEnvironment env = context.env;
+        env.setParallelism(4);
+
+        Schema schema = AvroGenericRecordSource.loadSchema();
+        env.addSource(new AvroGenericRecordSource(RECORDS_PER_INVOCATION, 10, schema))
+                .rebalance()
+                .addSink(new DiscardingSink<>());
+
+        env.execute();
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(value = SerializationFrameworkMiniBenchmarks.RECORDS_PER_INVOCATION)
+    public void serializerScalaADT(FlinkEnvironmentContext context) throws Exception {
+        StreamExecutionEnvironment env = context.env;
+        env.setParallelism(4);
+
+        env.addSource(new ScalaADTSource(RECORDS_PER_INVOCATION), ScalaADTSource.adtTypeInfo())
+                .rebalance()
+                .addSink(new DiscardingSink<>());
+
+        env.execute();
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(value = SerializationFrameworkMiniBenchmarks.RECORDS_PER_INVOCATION)
+    public void serializerKryoThrift(FlinkEnvironmentContext context) throws Exception {
+        StreamExecutionEnvironment env = context.env;
+        env.setParallelism(4);
+        ExecutionConfig executionConfig = env.getConfig();
+        executionConfig.enableForceKryo();
+        executionConfig.addDefaultKryoSerializer(
+                org.apache.flink.benchmark.thrift.MyPojo.class, TBaseSerializer.class);
+        executionConfig.addDefaultKryoSerializer(
+                org.apache.flink.benchmark.thrift.MyOperation.class, TBaseSerializer.class);
+
+        env.addSource(new ThriftPojoSource(RECORDS_PER_INVOCATION, 10))
+                .rebalance()
+                .addSink(new DiscardingSink<>());
+
+        env.execute();
+    }
+
+    @Benchmark
+    @OperationsPerInvocation(value = SerializationFrameworkMiniBenchmarks.RECORDS_PER_INVOCATION)
+    public void serializerKryoProtobuf(FlinkEnvironmentContext context) throws Exception {
+        StreamExecutionEnvironment env = context.env;
+        env.setParallelism(4);
+        ExecutionConfig executionConfig = env.getConfig();
+        executionConfig.enableForceKryo();
+        executionConfig.registerTypeWithKryoSerializer(
+                org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyPojo.class,
+                ProtobufSerializer.class);
+        executionConfig.registerTypeWithKryoSerializer(
+                org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyOperation.class,
+                ProtobufSerializer.class);
+
+        env.addSource(new ProtobufPojoSource(RECORDS_PER_INVOCATION, 10))
+                .rebalance()
+                .addSink(new DiscardingSink<>());
+
+        env.execute();
+    }
+
+    /** Source emitting an Avro GenericRecord. */
+    public static class AvroGenericRecordSource extends BaseSourceWithKeyRange<GenericRecord>
+            implements ResultTypeQueryable<GenericRecord> {
+        private static final long serialVersionUID = 2941333602938145526L;
+
+        private final GenericRecordAvroTypeInfo producedType;
+        private final String schemaString;
+        private transient Schema myPojoSchema;
+        private transient GenericRecord template;
+
+        public AvroGenericRecordSource(int numEvents, int numKeys, Schema schema) {
+            super(numEvents, numKeys);
+            this.producedType = new GenericRecordAvroTypeInfo(schema);
+            this.myPojoSchema = schema;
+            this.schemaString = schema.toString();
+        }
+
+        private static Schema loadSchema() throws IOException {
+            ClassLoader classLoader = ClassLoader.getSystemClassLoader();
+            try (InputStream is = classLoader.getResourceAsStream("avro/mypojo.avsc")) {
+                if (is == null) {
+                    throw new FileNotFoundException("File 'mypojo.avsc' not found");
+                }
+                return new Schema.Parser().parse(is);
+            }
+        }
+
+        @Override
+        protected void init() {
+            super.init();
+
+            if (myPojoSchema == null) {
+                this.myPojoSchema = new Schema.Parser().parse(schemaString);
+            }
+            Schema myOperationSchema =
+                    myPojoSchema.getField("operations").schema().getElementType();
+
+            template = new GenericData.Record(myPojoSchema);
+            template.put("id", 0);
+            template.put("name", "myName");
+            template.put("operationName", Arrays.asList("op1", "op2", "op3", "op4"));
+
+            GenericData.Record op1 = new GenericData.Record(myOperationSchema);
+            op1.put("id", 1);
+            op1.put("name", "op1");
+            GenericData.Record op2 = new GenericData.Record(myOperationSchema);
+            op2.put("id", 2);
+            op2.put("name", "op2");
+            GenericData.Record op3 = new GenericData.Record(myOperationSchema);
+            op3.put("id", 3);
+            op3.put("name", "op3");
+            template.put("operations", Arrays.asList(op1, op2, op3));
+
+            template.put("otherId1", 1);
+            template.put("otherId2", 2);
+            template.put("otherId3", 3);
+            template.put("nullable", "null");
+        }
+
+        @Override
+        protected GenericRecord getElement(int keyId) {
+            template.put("id", keyId);
+            return template;
+        }
+
+        @Override
+        public TypeInformation<GenericRecord> getProducedType() {
+            return producedType;
+        }
+    }
+
+    /**
+     * Source emitting a {@link org.apache.flink.benchmark.thrift.MyPojo POJO} generated by an
+     * Apache Thrift schema.
+     */
+    public static class ThriftPojoSource
+            extends BaseSourceWithKeyRange<org.apache.flink.benchmark.thrift.MyPojo> {
+        private static final long serialVersionUID = 2941333602938145526L;
+
+        private transient org.apache.flink.benchmark.thrift.MyPojo template;
+
+        public ThriftPojoSource(int numEvents, int numKeys) {
+            super(numEvents, numKeys);
+        }
+
+        @Override
+        protected void init() {
+            super.init();
+            template =
+                    new org.apache.flink.benchmark.thrift.MyPojo(
+                            0,
+                            "myName",
+                            Arrays.asList("op1", "op2", "op3", "op4"),
+                            Arrays.asList(
+                                    new org.apache.flink.benchmark.thrift.MyOperation(1, "op1"),
+                                    new org.apache.flink.benchmark.thrift.MyOperation(2, "op2"),
+                                    new org.apache.flink.benchmark.thrift.MyOperation(3, "op3")),
+                            1,
+                            2,
+                            3);
+            template.setSomeObject("null");
+        }
+
+        @Override
+        protected org.apache.flink.benchmark.thrift.MyPojo getElement(int keyId) {
+            template.setId(keyId);
+            return template;
+        }
+    }
+
+    /**
+     * Source emitting a {@link org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyPojo POJO}
+     * generated by a Protobuf schema.
+     */
+    public static class ProtobufPojoSource
+            extends BaseSourceWithKeyRange<
+                    org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyPojo> {
+        private static final long serialVersionUID = 2941333602938145526L;
+
+        private transient org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyPojo template;
+
+        public ProtobufPojoSource(int numEvents, int numKeys) {
+            super(numEvents, numKeys);
+        }
+
+        @Override
+        protected void init() {
+            super.init();
+            template =
+                    org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyPojo.newBuilder()
+                            .setId(0)
+                            .setName("myName")
+                            .addAllOperationName(Arrays.asList("op1", "op2", "op3", "op4"))
+                            .addOperations(
+                                    org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyOperation
+                                            .newBuilder()
+                                            .setId(1)
+                                            .setName("op1"))
+                            .addOperations(
+                                    org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyOperation
+                                            .newBuilder()
+                                            .setId(2)
+                                            .setName("op2"))
+                            .addOperations(
+                                    org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyOperation
+                                            .newBuilder()
+                                            .setId(3)
+                                            .setName("op3"))
+                            .setOtherId1(1)
+                            .setOtherId2(2)
+                            .setOtherId3(3)
+                            .setSomeObject("null")
+                            .build();
+        }
+
+        @Override
+        protected org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyPojo getElement(
+                int keyId) {
+            return org.apache.flink.benchmark.protobuf.MyPojoOuterClass.MyPojo.newBuilder(template)
+                    .setId(keyId)
+                    .build();
+        }
+    }
 }
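
Note: each benchmark above builds the same three-node topology (source ->
rebalance -> DiscardingSink) and only swaps the serializer configuration, so a
single framework can be measured in isolation by narrowing the JMH include
pattern of the main() shown above. A minimal sketch (the wrapper class and the
chosen method name are illustrative, not part of this commit):

    import org.openjdk.jmh.runner.Runner;
    import org.openjdk.jmh.runner.RunnerException;
    import org.openjdk.jmh.runner.options.Options;
    import org.openjdk.jmh.runner.options.OptionsBuilder;
    import org.openjdk.jmh.runner.options.VerboseMode;

    public class RunSingleBenchmark {
        public static void main(String[] args) throws RunnerException {
            // Narrow the regex to one @Benchmark method instead of the whole class.
            Options options =
                    new OptionsBuilder()
                            .verbosity(VerboseMode.NORMAL)
                            .include(
                                    ".*SerializationFrameworkAllBenchmarks.serializerAvroGeneric")
                            .build();
            new Runner(options).run();
        }
    }
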
diff --git a/src/main/java/org/apache/flink/benchmark/full/StringSerializationBenchmark.java b/src/main/java/org/apache/flink/benchmark/full/StringSerializationBenchmark.java
index 9490a17..57ded08 100644
--- a/src/main/java/org/apache/flink/benchmark/full/StringSerializationBenchmark.java
+++ b/src/main/java/org/apache/flink/benchmark/full/StringSerializationBenchmark.java
@@ -52,33 +52,34 @@ import java.util.concurrent.TimeUnit;
 @OutputTimeUnit(TimeUnit.MILLISECONDS)
 public class StringSerializationBenchmark extends BenchmarkBase {
 
-    public static void main(String[] args)
-            throws RunnerException {
-        Options options = new OptionsBuilder()
-                .verbosity(VerboseMode.NORMAL)
-                .include(".*" + StringSerializationBenchmark.class.getCanonicalName() + ".*")
-                .build();
-
-        new Runner(options).run();
-    }
-
+    public static final char[] asciiChars =
+            "qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM1234567890".toCharArray();
+    public static final char[] russianChars =
+            "йцукенгшщзхъфывапролджэячсмитьбюЙЦУКЕНГШЩЗХЪФЫВАПРОЛДЖЭЯЧСМИТЬБЮ".toCharArray();
+    public static final char[] chineseChars =
+            "的是不了人我在有他这为之大来以个中上们到国说和地也子要时道出而于就下得可你年生".toCharArray();
     @Param({"ascii", "russian", "chinese"})
     public String type;
-
     @Param({"4", "128", "16384"})
     public String lengthStr;
-
     int length;
     String input;
-    public static final char[] asciiChars = "qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM1234567890".toCharArray();
-    public static final char[] russianChars = "йцукенгшщзхъфывапролджэячсмитьбюЙЦУКЕНГШЩЗХЪФЫВАПРОЛДЖЭЯЧСМИТЬБЮ".toCharArray();
-    public static final char[] chineseChars = "的是不了人我在有他这为之大来以个中上们到国说和地也子要时道出而于就下得可你年生".toCharArray();
-
     ExecutionConfig config = new ExecutionConfig();
     TypeSerializer<String> serializer = TypeInformation.of(String.class).createSerializer(config);
     ByteArrayInputStream serializedBuffer;
     DataInputView serializedStream;
 
+    public static void main(String[] args) throws RunnerException {
+        Options options =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(
+                                ".*" + StringSerializationBenchmark.class.getCanonicalName() + ".*")
+                        .build();
+
+        new Runner(options).run();
+    }
+
     @Setup
     public void setup() throws IOException {
         length = Integer.parseInt(lengthStr);
@@ -117,10 +118,9 @@ public class StringSerializationBenchmark extends BenchmarkBase {
     private String generate(char[] charset, int length) {
         char[] buffer = new char[length];
         Random random = new Random();
-        for (int i=0; i<length; i++) {
+        for (int i = 0; i < length; i++) {
             buffer[i] = charset[random.nextInt(charset.length)];
         }
         return new String(buffer);
     }
-
 }
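
Note: this benchmark drives Flink's TypeSerializer<String> directly, without a
job graph. A hedged round-trip sketch of the same serializer API, assuming
Flink's DataOutputSerializer/DataInputDeserializer buffer classes (the wrapper
class is illustrative, not part of this commit):

    import org.apache.flink.api.common.ExecutionConfig;
    import org.apache.flink.api.common.typeinfo.TypeInformation;
    import org.apache.flink.api.common.typeutils.TypeSerializer;
    import org.apache.flink.core.memory.DataInputDeserializer;
    import org.apache.flink.core.memory.DataOutputSerializer;

    import java.io.IOException;

    public class StringRoundTrip {
        public static void main(String[] args) throws IOException {
            TypeSerializer<String> serializer =
                    TypeInformation.of(String.class).createSerializer(new ExecutionConfig());

            // Write into a growable buffer, then read the copied bytes back.
            DataOutputSerializer out = new DataOutputSerializer(64);
            serializer.serialize("qwerty", out);
            String copy =
                    serializer.deserialize(new DataInputDeserializer(out.getCopyOfBuffer()));

            System.out.println(copy); // prints "qwerty"
        }
    }
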
diff --git a/src/main/java/org/apache/flink/benchmark/functions/BaseSourceWithKeyRange.java b/src/main/java/org/apache/flink/benchmark/functions/BaseSourceWithKeyRange.java
index 11cefdd..7629c84 100644
--- a/src/main/java/org/apache/flink/benchmark/functions/BaseSourceWithKeyRange.java
+++ b/src/main/java/org/apache/flink/benchmark/functions/BaseSourceWithKeyRange.java
@@ -21,44 +21,41 @@ package org.apache.flink.benchmark.functions;
 import org.apache.flink.streaming.api.functions.source.ParallelSourceFunction;
 import org.apache.flink.streaming.api.functions.source.SourceFunction.SourceContext;
 
-/**
- * Abstract base class for sources with a defined number of events and a fixed key range.
- */
-abstract public class BaseSourceWithKeyRange<T> implements ParallelSourceFunction<T> {
-	private static final long serialVersionUID = 8318018060123048234L;
-
-	protected final int numKeys;
-	protected int remainingEvents;
-
-	public BaseSourceWithKeyRange(int numEvents, int numKeys) {
-		this.remainingEvents = numEvents;
-		this.numKeys = numKeys;
-	}
-
-	protected void init() {
-	}
-
-	protected abstract T getElement(int keyId);
-
-	@Override
-	public void run(SourceContext<T> out) {
-		init();
-
-		int keyId = 0;
-		while (--remainingEvents >= 0) {
-			T element = getElement(keyId);
-			synchronized (out.getCheckpointLock()) {
-				out.collect(element);
-			}
-			++keyId;
-			if (keyId >= numKeys) {
-				keyId = 0;
-			}
-		}
-	}
-
-	@Override
-	public void cancel() {
-		this.remainingEvents = 0;
-	}
+/** Abstract base class for sources with a defined number of events and a fixed key range. */
+public abstract class BaseSourceWithKeyRange<T> implements ParallelSourceFunction<T> {
+    private static final long serialVersionUID = 8318018060123048234L;
+
+    protected final int numKeys;
+    protected int remainingEvents;
+
+    public BaseSourceWithKeyRange(int numEvents, int numKeys) {
+        this.remainingEvents = numEvents;
+        this.numKeys = numKeys;
+    }
+
+    protected void init() {}
+
+    protected abstract T getElement(int keyId);
+
+    @Override
+    public void run(SourceContext<T> out) {
+        init();
+
+        int keyId = 0;
+        while (--remainingEvents >= 0) {
+            T element = getElement(keyId);
+            synchronized (out.getCheckpointLock()) {
+                out.collect(element);
+            }
+            ++keyId;
+            if (keyId >= numKeys) {
+                keyId = 0;
+            }
+        }
+    }
+
+    @Override
+    public void cancel() {
+        this.remainingEvents = 0;
+    }
 }
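
Note: concrete sources only need a constructor and getElement(); run() already
takes care of the checkpoint lock and of cycling keyId through [0, numKeys). A
minimal hypothetical subclass (not part of this commit):

    import org.apache.flink.benchmark.functions.BaseSourceWithKeyRange;

    /** Illustrative example: emits each key id as a Long. */
    public class LongKeySource extends BaseSourceWithKeyRange<Long> {
        private static final long serialVersionUID = 1L;

        public LongKeySource(int numEvents, int numKeys) {
            super(numEvents, numKeys);
        }

        @Override
        protected Long getElement(int keyId) {
            return (long) keyId;
        }
    }
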
diff --git a/src/main/java/org/apache/flink/benchmark/functions/IntLongApplications.java b/src/main/java/org/apache/flink/benchmark/functions/IntLongApplications.java
index eed976c..2e6a798 100644
--- a/src/main/java/org/apache/flink/benchmark/functions/IntLongApplications.java
+++ b/src/main/java/org/apache/flink/benchmark/functions/IntLongApplications.java
@@ -27,8 +27,7 @@ public class IntLongApplications {
     public static <W extends Window> void reduceWithWindow(
             DataStreamSource<IntegerLongSource.Record> source,
             WindowAssigner<Object, W> windowAssigner) {
-        source
-                .map(new MultiplyIntLongByTwo())
+        source.map(new MultiplyIntLongByTwo())
                 .keyBy(record -> record.key)
                 .window(windowAssigner)
                 .reduce(new SumReduceIntLong())
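
Note: a sketch of how this helper is typically invoked, picking a tumbling
processing-time window as an illustrative assigner (the wrapper class is not
part of this commit):

    import org.apache.flink.benchmark.functions.IntLongApplications;
    import org.apache.flink.benchmark.functions.IntegerLongSource;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
    import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
    import org.apache.flink.streaming.api.windowing.time.Time;

    public class WindowedSumExample {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env =
                    StreamExecutionEnvironment.getExecutionEnvironment();
            // map -> keyBy -> window -> reduce is wired up inside the helper.
            IntLongApplications.reduceWithWindow(
                    env.addSource(new IntegerLongSource(100, 1_000_000L)),
                    TumblingProcessingTimeWindows.of(Time.seconds(10)));
            env.execute("windowed sum");
        }
    }
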
diff --git a/src/main/java/org/apache/flink/benchmark/functions/IntegerLongSource.java b/src/main/java/org/apache/flink/benchmark/functions/IntegerLongSource.java
index 1555931..aeddfa3 100644
--- a/src/main/java/org/apache/flink/benchmark/functions/IntegerLongSource.java
+++ b/src/main/java/org/apache/flink/benchmark/functions/IntegerLongSource.java
@@ -21,6 +21,33 @@ package org.apache.flink.benchmark.functions;
 import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
 
 public class IntegerLongSource extends RichParallelSourceFunction<IntegerLongSource.Record> {
+    private volatile boolean running = true;
+    private int numberOfKeys;
+    private long numberOfElements;
+    public IntegerLongSource(int numberOfKeys, long numberOfElements) {
+        this.numberOfKeys = numberOfKeys;
+        this.numberOfElements = numberOfElements;
+    }
+
+    @Override
+    public void run(SourceContext<Record> ctx) throws Exception {
+        long counter = 0;
+
+        while (running && counter < numberOfElements) {
+            synchronized (ctx.getCheckpointLock()) {
+                ctx.collectWithTimestamp(
+                        Record.of((int) (counter % numberOfKeys), counter), counter);
+                counter++;
+            }
+        }
+        running = false;
+    }
+
+    @Override
+    public void cancel() {
+        running = false;
+    }
+
     public static final class Record {
         public final int key;
         public final long value;
@@ -47,31 +74,4 @@ public class IntegerLongSource extends RichParallelSourceFunction<IntegerLongSou
             return String.format("(%s, %s)", key, value);
         }
     }
-
-    private volatile boolean running = true;
-    private int numberOfKeys;
-    private long numberOfElements;
-
-    public IntegerLongSource(int numberOfKeys, long numberOfElements) {
-        this.numberOfKeys = numberOfKeys;
-        this.numberOfElements = numberOfElements;
-    }
-
-    @Override
-    public void run(SourceContext<Record> ctx) throws Exception {
-        long counter = 0;
-
-        while (running && counter < numberOfElements) {
-            synchronized (ctx.getCheckpointLock()) {
-                ctx.collectWithTimestamp(Record.of((int) (counter % numberOfKeys), counter), counter);
-                counter++;
-            }
-        }
-        running = false;
-    }
-
-    @Override
-    public void cancel() {
-        running = false;
-    }
-}
\ No newline at end of file
+}
diff --git a/src/main/java/org/apache/flink/benchmark/functions/LongNewSource.java b/src/main/java/org/apache/flink/benchmark/functions/LongNewSource.java
index 5066847..3ab7e42 100644
--- a/src/main/java/org/apache/flink/benchmark/functions/LongNewSource.java
+++ b/src/main/java/org/apache/flink/benchmark/functions/LongNewSource.java
@@ -33,33 +33,32 @@ import java.util.stream.IntStream;
  * The source should produce same records as {@link LongSource}.
  *
  * <p>{@link LongSource} generates records from 0 to {@code maxValue} for every parallel instance.
- * The original {@link NumberSequenceSource} would split the range 0 to {@code maxValue} between all subtasks.
+ * The original {@link NumberSequenceSource} would split the range 0 to {@code maxValue} between all
+ * subtasks.
  */
 public class LongNewSource extends NumberSequenceSource {
-	private final Boundedness boundedness;
-	private final long maxValue;
+    private final Boundedness boundedness;
+    private final long maxValue;
 
-	public LongNewSource(Boundedness boundedness, long maxValue) {
-		super(-1, -1); // we do not use the from/to of the underlying source
-		this.boundedness = boundedness;
-		this.maxValue = maxValue;
-	}
+    public LongNewSource(Boundedness boundedness, long maxValue) {
+        super(-1, -1); // we do not use the from/to of the underlying source
+        this.boundedness = boundedness;
+        this.maxValue = maxValue;
+    }
 
-	@Override
-	public Boundedness getBoundedness() {
-		return boundedness;
-	}
+    @Override
+    public Boundedness getBoundedness() {
+        return boundedness;
+    }
 
-	@Override
-	public SplitEnumerator<NumberSequenceSplit, Collection<NumberSequenceSplit>> createEnumerator(
-			SplitEnumeratorContext<NumberSequenceSplit> splitEnumeratorContext) {
+    @Override
+    public SplitEnumerator<NumberSequenceSplit, Collection<NumberSequenceSplit>> createEnumerator(
+            SplitEnumeratorContext<NumberSequenceSplit> splitEnumeratorContext) {
 
-		final List<NumberSequenceSplit> splits =
-				IntStream.range(0, splitEnumeratorContext.currentParallelism())
-						.mapToObj(
-								id -> new NumberSequenceSplit(String.valueOf(id), 0, maxValue)
-						)
-						.collect(Collectors.toList());
-		return new IteratorSourceEnumerator<>(splitEnumeratorContext, splits);
-	}
+        final List<NumberSequenceSplit> splits =
+                IntStream.range(0, splitEnumeratorContext.currentParallelism())
+                        .mapToObj(id -> new NumberSequenceSplit(String.valueOf(id), 0, maxValue))
+                        .collect(Collectors.toList());
+        return new IteratorSourceEnumerator<>(splitEnumeratorContext, splits);
+    }
 }
diff --git a/src/main/java/org/apache/flink/benchmark/functions/LongSourceType.java b/src/main/java/org/apache/flink/benchmark/functions/LongSourceType.java
index 2a33a19..d44eafc 100644
--- a/src/main/java/org/apache/flink/benchmark/functions/LongSourceType.java
+++ b/src/main/java/org/apache/flink/benchmark/functions/LongSourceType.java
@@ -25,32 +25,33 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 
 import java.util.function.BiFunction;
 
-/**
- * Enum based factory for different Long sources.
- */
+/** Enum based factory for different Long sources. */
 public enum LongSourceType {
-	LEGACY((env, maxValue) -> {
-		return env.addSource(new LongSource(maxValue));
-	}),
-	F27_BOUNDED((env, maxValue) -> {
-		return env.fromSource(
-				new LongNewSource(Boundedness.BOUNDED, maxValue),
-				WatermarkStrategy.noWatermarks(),
-				"NewLongSource");
-	}),
-	F27_UNBOUNDED((env, maxValue) -> {
-		return env.fromSource(
-				new LongNewSource(Boundedness.CONTINUOUS_UNBOUNDED, maxValue),
-				WatermarkStrategy.noWatermarks(),
-				"NewLongSource");
-	});
-	private final BiFunction<StreamExecutionEnvironment, Long, DataStreamSource<Long>> factory;
+    LEGACY(
+            (env, maxValue) -> {
+                return env.addSource(new LongSource(maxValue));
+            }),
+    F27_BOUNDED(
+            (env, maxValue) -> {
+                return env.fromSource(
+                        new LongNewSource(Boundedness.BOUNDED, maxValue),
+                        WatermarkStrategy.noWatermarks(),
+                        "NewLongSource");
+            }),
+    F27_UNBOUNDED(
+            (env, maxValue) -> {
+                return env.fromSource(
+                        new LongNewSource(Boundedness.CONTINUOUS_UNBOUNDED, maxValue),
+                        WatermarkStrategy.noWatermarks(),
+                        "NewLongSource");
+            });
+    private final BiFunction<StreamExecutionEnvironment, Long, DataStreamSource<Long>> factory;
 
-	LongSourceType(BiFunction<StreamExecutionEnvironment, Long, DataStreamSource<Long>> factory) {
-		this.factory = factory;
-	}
+    LongSourceType(BiFunction<StreamExecutionEnvironment, Long, DataStreamSource<Long>> factory) {
+        this.factory = factory;
+    }
 
-	public DataStreamSource<Long> source(StreamExecutionEnvironment environment, long maxValue) {
-		return factory.apply(environment, maxValue);
-	}
+    public DataStreamSource<Long> source(StreamExecutionEnvironment environment, long maxValue) {
+        return factory.apply(environment, maxValue);
+    }
 };
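
Note: all three variants produce the same records, so a benchmark can switch
the source stack through a single parameter. Illustrative usage (the wrapper
class is not part of this commit):

    import org.apache.flink.benchmark.functions.LongSourceType;
    import org.apache.flink.streaming.api.datastream.DataStreamSource;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    public class SourceTypeExample {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env =
                    StreamExecutionEnvironment.getExecutionEnvironment();
            // LEGACY uses addSource(); the F27_* variants go through the
            // FLIP-27 Source API via fromSource().
            DataStreamSource<Long> source = LongSourceType.F27_BOUNDED.source(env, 1_000L);
            source.print();
            env.execute("long source");
        }
    }
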
diff --git a/src/main/java/org/apache/flink/benchmark/functions/MultiplyIntLongByTwo.java b/src/main/java/org/apache/flink/benchmark/functions/MultiplyIntLongByTwo.java
index a889b59..5e5f12f 100644
--- a/src/main/java/org/apache/flink/benchmark/functions/MultiplyIntLongByTwo.java
+++ b/src/main/java/org/apache/flink/benchmark/functions/MultiplyIntLongByTwo.java
@@ -20,7 +20,8 @@ package org.apache.flink.benchmark.functions;
 
 import org.apache.flink.api.common.functions.MapFunction;
 
-public class MultiplyIntLongByTwo implements MapFunction<IntegerLongSource.Record, IntegerLongSource.Record> {
+public class MultiplyIntLongByTwo
+        implements MapFunction<IntegerLongSource.Record, IntegerLongSource.Record> {
     @Override
     public IntegerLongSource.Record map(IntegerLongSource.Record record) throws Exception {
         return IntegerLongSource.Record.of(record.key, record.value * 2);
diff --git a/src/main/java/org/apache/flink/benchmark/functions/QueuingLongSource.java b/src/main/java/org/apache/flink/benchmark/functions/QueuingLongSource.java
index dc746ba..a0935ae 100644
--- a/src/main/java/org/apache/flink/benchmark/functions/QueuingLongSource.java
+++ b/src/main/java/org/apache/flink/benchmark/functions/QueuingLongSource.java
@@ -20,34 +20,34 @@ package org.apache.flink.benchmark.functions;
 
 public class QueuingLongSource extends LongSource {
 
-	private static Object lock = new Object();
+    private static Object lock = new Object();
 
-	private static int currentRank = 1;
+    private static int currentRank = 1;
 
-	private final int rank;
+    private final int rank;
 
-	public QueuingLongSource(int rank, long maxValue) {
-		super(maxValue);
-		this.rank = rank;
-	}
+    public QueuingLongSource(int rank, long maxValue) {
+        super(maxValue);
+        this.rank = rank;
+    }
 
-	@Override
-	public void run(SourceContext<Long> ctx) throws Exception {
-		synchronized (lock) {
-			while (currentRank != rank) {
-				lock.wait();
-			}
-		}
+    public static void reset() {
+        currentRank = 1;
+    }
 
-		super.run(ctx);
+    @Override
+    public void run(SourceContext<Long> ctx) throws Exception {
+        synchronized (lock) {
+            while (currentRank != rank) {
+                lock.wait();
+            }
+        }
 
-		synchronized (lock) {
-			currentRank++;
-			lock.notifyAll();
-		}
-	}
+        super.run(ctx);
 
-	public static void reset() {
-		currentRank = 1;
-	}
+        synchronized (lock) {
+            currentRank++;
+            lock.notifyAll();
+        }
+    }
 }
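
Note: the static rank counter serializes the sources: rank 2 blocks on the
shared lock until rank 1 has emitted everything. A sketch of that ordering in
a local job (the static lock only coordinates sources within one JVM, so this
assumes single-process execution; the wrapper class is not part of this
commit):

    import org.apache.flink.benchmark.functions.QueuingLongSource;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    public class OrderedSourcesExample {
        public static void main(String[] args) throws Exception {
            QueuingLongSource.reset(); // the rank counter is static; reset between runs

            StreamExecutionEnvironment env =
                    StreamExecutionEnvironment.getExecutionEnvironment();
            // One instance per rank, so keep the sources at parallelism 1.
            env.addSource(new QueuingLongSource(1, 1_000)).setParallelism(1)
                    .union(env.addSource(new QueuingLongSource(2, 1_000)).setParallelism(1))
                    .print();
            env.execute("ordered sources");
        }
    }
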
diff --git a/src/main/java/org/apache/flink/benchmark/functions/SuccessException.java b/src/main/java/org/apache/flink/benchmark/functions/SuccessException.java
index 37efab4..8244fd5 100644
--- a/src/main/java/org/apache/flink/benchmark/functions/SuccessException.java
+++ b/src/main/java/org/apache/flink/benchmark/functions/SuccessException.java
@@ -18,5 +18,4 @@
 
 package org.apache.flink.benchmark.functions;
 
-public class SuccessException extends Exception {
-}
+public class SuccessException extends Exception {}
diff --git a/src/main/java/org/apache/flink/benchmark/functions/SumReduceIntLong.java b/src/main/java/org/apache/flink/benchmark/functions/SumReduceIntLong.java
index d15830d..9b8b8ea 100644
--- a/src/main/java/org/apache/flink/benchmark/functions/SumReduceIntLong.java
+++ b/src/main/java/org/apache/flink/benchmark/functions/SumReduceIntLong.java
@@ -22,7 +22,8 @@ import org.apache.flink.api.common.functions.ReduceFunction;
 
 public class SumReduceIntLong implements ReduceFunction<IntegerLongSource.Record> {
     @Override
-    public IntegerLongSource.Record reduce(IntegerLongSource.Record var1, IntegerLongSource.Record var2) throws Exception {
+    public IntegerLongSource.Record reduce(
+            IntegerLongSource.Record var1, IntegerLongSource.Record var2) throws Exception {
         return IntegerLongSource.Record.of(var1.key, var1.value + var2.value);
     }
 }
diff --git a/src/main/java/org/apache/flink/benchmark/functions/TestUtils.java b/src/main/java/org/apache/flink/benchmark/functions/TestUtils.java
index baff238..a7451e8 100644
--- a/src/main/java/org/apache/flink/benchmark/functions/TestUtils.java
+++ b/src/main/java/org/apache/flink/benchmark/functions/TestUtils.java
@@ -25,16 +25,14 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 
 import static org.junit.Assert.fail;
 
-/**
- * Test utilities.
- */
+/** Test utilities. */
 public class TestUtils {
 
-    public static JobExecutionResult tryExecute(StreamExecutionEnvironment see, String name) throws Exception {
+    public static JobExecutionResult tryExecute(StreamExecutionEnvironment see, String name)
+            throws Exception {
         try {
             return see.execute(name);
-        }
-        catch (ProgramInvocationException | JobExecutionException root) {
+        } catch (ProgramInvocationException | JobExecutionException root) {
             Throwable cause = root.getCause();
 
             // search for nested SuccessExceptions
@@ -43,8 +41,7 @@ public class TestUtils {
                 if (cause == null || depth++ == 20) {
                     root.printStackTrace();
                     fail("Test failed: " + root.getMessage());
-                }
-                else {
+                } else {
                     cause = cause.getCause();
                 }
             }
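
Note: tryExecute() pairs with SuccessException above: a function deliberately
fails the job once it has seen everything it expects, and the loop here walks
the cause chain to treat that failure as a pass. A sketch of the pattern,
assuming tryExecute() returns normally once a SuccessException is found in the
chain (the wrapper class is not part of this commit):

    import org.apache.flink.benchmark.functions.SuccessException;
    import org.apache.flink.benchmark.functions.TestUtils;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
    import org.apache.flink.streaming.api.functions.sink.SinkFunction;

    public class TryExecuteExample {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env =
                    StreamExecutionEnvironment.getExecutionEnvironment();
            env.fromElements(1L, 2L, 3L)
                    .addSink(
                            new SinkFunction<Long>() {
                                @Override
                                public void invoke(Long value, Context context) throws Exception {
                                    if (value == 3L) {
                                        throw new SuccessException(); // signals "done"
                                    }
                                }
                            });
            TestUtils.tryExecute(env, "success-exception example");
        }
    }
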
diff --git a/src/main/java/org/apache/flink/benchmark/operators/MultiplyByTwoCoStreamMap.java b/src/main/java/org/apache/flink/benchmark/operators/MultiplyByTwoCoStreamMap.java
index e6b277a..5f1f335 100644
--- a/src/main/java/org/apache/flink/benchmark/operators/MultiplyByTwoCoStreamMap.java
+++ b/src/main/java/org/apache/flink/benchmark/operators/MultiplyByTwoCoStreamMap.java
@@ -22,17 +22,16 @@ import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
 import org.apache.flink.streaming.api.operators.TwoInputStreamOperator;
 import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
 
-public class MultiplyByTwoCoStreamMap
-		extends AbstractStreamOperator<Long>
-		implements TwoInputStreamOperator<Long, Long, Long> {
+public class MultiplyByTwoCoStreamMap extends AbstractStreamOperator<Long>
+        implements TwoInputStreamOperator<Long, Long, Long> {
 
-	@Override
-	public void processElement1(StreamRecord<Long> element) {
-		output.collect(element.replace(element.getValue() * 2));
-	}
+    @Override
+    public void processElement1(StreamRecord<Long> element) {
+        output.collect(element.replace(element.getValue() * 2));
+    }
 
-	@Override
-	public void processElement2(StreamRecord<Long> element) {
-		output.collect(element.replace(element.getValue() * 2));
-	}
+    @Override
+    public void processElement2(StreamRecord<Long> element) {
+        output.collect(element.replace(element.getValue() * 2));
+    }
 }
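
Note: raw TwoInputStreamOperators like this one are wired in with
connect()/transform() rather than a map(). Illustrative usage (the wrapper
class is not part of this commit):

    import org.apache.flink.api.common.typeinfo.Types;
    import org.apache.flink.benchmark.operators.MultiplyByTwoCoStreamMap;
    import org.apache.flink.streaming.api.datastream.DataStream;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    public class TwoInputExample {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env =
                    StreamExecutionEnvironment.getExecutionEnvironment();
            DataStream<Long> first = env.fromElements(1L, 2L);
            DataStream<Long> second = env.fromElements(3L, 4L);

            // Both inputs feed the same operator; each element is doubled.
            first.connect(second)
                    .transform("doubler", Types.LONG, new MultiplyByTwoCoStreamMap())
                    .print();

            env.execute("two-input doubler");
        }
    }
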
diff --git a/src/main/java/org/apache/flink/benchmark/operators/MultiplyByTwoOperatorFactory.java b/src/main/java/org/apache/flink/benchmark/operators/MultiplyByTwoOperatorFactory.java
index 657bc3f..fd76371 100644
--- a/src/main/java/org/apache/flink/benchmark/operators/MultiplyByTwoOperatorFactory.java
+++ b/src/main/java/org/apache/flink/benchmark/operators/MultiplyByTwoOperatorFactory.java
@@ -32,39 +32,39 @@ import java.util.List;
 
 @SuppressWarnings({"unchecked", "rawtypes"})
 public class MultiplyByTwoOperatorFactory extends AbstractStreamOperatorFactory<Long> {
-	@Override
-	public <T extends StreamOperator<Long>> T createStreamOperator(StreamOperatorParameters<Long> parameters) {
-		return (T) new MultiplyByTwoOperator(parameters);
-	}
+    @Override
+    public <T extends StreamOperator<Long>> T createStreamOperator(
+            StreamOperatorParameters<Long> parameters) {
+        return (T) new MultiplyByTwoOperator(parameters);
+    }
 
-	@Override
-	public Class<? extends StreamOperator> getStreamOperatorClass(ClassLoader classLoader) {
-		return MultiplyByTwoOperator.class;
-	}
+    @Override
+    public Class<? extends StreamOperator> getStreamOperatorClass(ClassLoader classLoader) {
+        return MultiplyByTwoOperator.class;
+    }
 
-	public static class MultiplyByTwoOperator extends AbstractStreamOperatorV2<Long> implements MultipleInputStreamOperator<Long> {
-		public MultiplyByTwoOperator(StreamOperatorParameters<Long> parameters) {
-			super(parameters, 2);
-		}
+    public static class MultiplyByTwoOperator extends AbstractStreamOperatorV2<Long>
+            implements MultipleInputStreamOperator<Long> {
+        public MultiplyByTwoOperator(StreamOperatorParameters<Long> parameters) {
+            super(parameters, 2);
+        }
 
-		@Override
-		public List<Input> getInputs() {
-			return Arrays.asList(
-					new MultiplyByTwoOperator.MultiplyByTwoInput(this, 1),
-					new MultiplyByTwoOperator.MultiplyByTwoInput(this, 2));
-		}
+        @Override
+        public List<Input> getInputs() {
+            return Arrays.asList(
+                    new MultiplyByTwoOperator.MultiplyByTwoInput(this, 1),
+                    new MultiplyByTwoOperator.MultiplyByTwoInput(this, 2));
+        }
 
-		private static class MultiplyByTwoInput extends AbstractInput<Long, Long> {
-			MultiplyByTwoInput(
-					AbstractStreamOperatorV2<Long> owner,
-					int inputId) {
-				super(owner, inputId);
-			}
+        private static class MultiplyByTwoInput extends AbstractInput<Long, Long> {
+            MultiplyByTwoInput(AbstractStreamOperatorV2<Long> owner, int inputId) {
+                super(owner, inputId);
+            }
 
-			@Override
-			public void processElement(StreamRecord<Long> element) {
-				output.collect(element.replace(element.getValue() * 2));
-			}
-		}
-	}
+            @Override
+            public void processElement(StreamRecord<Long> element) {
+                output.collect(element.replace(element.getValue() * 2));
+            }
+        }
+    }
 }
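
Note: unlike the two-input operator above, a MultipleInputStreamOperator is
attached through its factory and a MultipleInputTransformation. A sketch under
the assumption that Flink's MultipleInputTransformation and
MultipleConnectedStreams APIs are used as elsewhere in this repository (the
wrapper class is not part of this commit):

    import org.apache.flink.api.common.typeinfo.Types;
    import org.apache.flink.benchmark.operators.MultiplyByTwoOperatorFactory;
    import org.apache.flink.streaming.api.datastream.DataStream;
    import org.apache.flink.streaming.api.datastream.MultipleConnectedStreams;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
    import org.apache.flink.streaming.api.transformations.MultipleInputTransformation;

    public class MultipleInputExample {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env =
                    StreamExecutionEnvironment.getExecutionEnvironment();
            DataStream<Long> first = env.fromElements(1L, 2L);
            DataStream<Long> second = env.fromElements(3L, 4L);

            // One transformation, two inputs, both handled by MultiplyByTwoInput.
            MultipleInputTransformation<Long> transform =
                    new MultipleInputTransformation<>(
                            "doubler", new MultiplyByTwoOperatorFactory(), Types.LONG, 1);
            transform.addInput(first.getTransformation());
            transform.addInput(second.getTransformation());
            env.addOperator(transform);

            new MultipleConnectedStreams(env).transform(transform).print();
            env.execute("multiple-input doubler");
        }
    }
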
diff --git a/src/main/java/org/apache/flink/benchmark/operators/RecordSource.java b/src/main/java/org/apache/flink/benchmark/operators/RecordSource.java
index 5f8c610..3b21892 100644
--- a/src/main/java/org/apache/flink/benchmark/operators/RecordSource.java
+++ b/src/main/java/org/apache/flink/benchmark/operators/RecordSource.java
@@ -43,21 +43,6 @@ import java.util.concurrent.CompletableFuture;
 public class RecordSource implements Source<Record, EmptySplit, EmptyEnumeratorState> {
     public static final int DEFAULT_PAYLOAD_SIZE = 1024;
     private final int recordSize;
-
-    public static class Record {
-        public long value;
-        public byte[] payload;
-
-        public Record() {
-            this(0, DEFAULT_PAYLOAD_SIZE);
-        }
-
-        public Record(long value, int recordSize) {
-            this.value = value;
-            payload = new byte[recordSize];
-        }
-    }
-
     private final int minCheckpoints;
 
     public RecordSource(int minCheckpoints) {
@@ -101,6 +86,20 @@ public class RecordSource implements Source<Record, EmptySplit, EmptyEnumeratorS
         return new EnumeratorVersionedSerializer();
     }
 
+    public static class Record {
+        public long value;
+        public byte[] payload;
+
+        public Record() {
+            this(0, DEFAULT_PAYLOAD_SIZE);
+        }
+
+        public Record(long value, int recordSize) {
+            this.value = value;
+            payload = new byte[recordSize];
+        }
+    }
+
     public static class RecordSourceReader implements SourceReader<Record, EmptySplit> {
         private final int minCheckpoints;
         private final int recordSize;
diff --git a/src/main/java/org/apache/flink/benchmark/thrift/MyOperation.java b/src/main/java/org/apache/flink/benchmark/thrift/MyOperation.java
index 2fa409a..540813d 100644
--- a/src/main/java/org/apache/flink/benchmark/thrift/MyOperation.java
+++ b/src/main/java/org/apache/flink/benchmark/thrift/MyOperation.java
@@ -1,477 +1,513 @@
 /**
  * Autogenerated by Thrift Compiler (0.13.0)
  *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- *  @generated
+ * <p>DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *
+ * @generated
  */
 package org.apache.flink.benchmark.thrift;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-03-06")
-public class MyOperation implements org.apache.thrift.TBase<MyOperation, MyOperation._Fields>, java.io.Serializable, Cloneable, Comparable<MyOperation> {
-  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("MyOperation");
+@javax.annotation.Generated(
+        value = "Autogenerated by Thrift Compiler (0.13.0)",
+        date = "2020-03-06")
+public class MyOperation
+        implements org.apache.thrift.TBase<MyOperation, MyOperation._Fields>,
+                java.io.Serializable,
+                Cloneable,
+                Comparable<MyOperation> {
+    public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData>
+            metaDataMap;
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC =
+            new org.apache.thrift.protocol.TStruct("MyOperation");
+    private static final org.apache.thrift.protocol.TField ID_FIELD_DESC =
+            new org.apache.thrift.protocol.TField(
+                    "id", org.apache.thrift.protocol.TType.I32, (short) 1);
+    private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC =
+            new org.apache.thrift.protocol.TField(
+                    "name", org.apache.thrift.protocol.TType.STRING, (short) 2);
+    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY =
+            new MyOperationStandardSchemeFactory();
+    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY =
+            new MyOperationTupleSchemeFactory();
+    // isset id assignments
+    private static final int __ID_ISSET_ID = 0;
 
-  private static final org.apache.thrift.protocol.TField ID_FIELD_DESC = new org.apache.thrift.protocol.TField("id", org.apache.thrift.protocol.TType.I32, (short)1);
-  private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("name", org.apache.thrift.protocol.TType.STRING, (short)2);
+    static {
+        java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap =
+                new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(
+                        _Fields.class);
+        tmpMap.put(
+                _Fields.ID,
+                new org.apache.thrift.meta_data.FieldMetaData(
+                        "id",
+                        org.apache.thrift.TFieldRequirementType.DEFAULT,
+                        new org.apache.thrift.meta_data.FieldValueMetaData(
+                                org.apache.thrift.protocol.TType.I32, "int")));
+        tmpMap.put(
+                _Fields.NAME,
+                new org.apache.thrift.meta_data.FieldMetaData(
+                        "name",
+                        org.apache.thrift.TFieldRequirementType.DEFAULT,
+                        new org.apache.thrift.meta_data.FieldValueMetaData(
+                                org.apache.thrift.protocol.TType.STRING)));
+        metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
+        org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(
+                MyOperation.class, metaDataMap);
+    }
 
-  private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new MyOperationStandardSchemeFactory();
-  private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new MyOperationTupleSchemeFactory();
+    public int id; // required
+    public @org.apache.thrift.annotation.Nullable java.lang.String name; // required
+    private byte __isset_bitfield = 0;
 
-  public int id; // required
-  public @org.apache.thrift.annotation.Nullable java.lang.String name; // required
+    public MyOperation() {}
 
-  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
-  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
-    ID((short)1, "id"),
-    NAME((short)2, "name");
+    public MyOperation(int id, java.lang.String name) {
+        this();
+        this.id = id;
+        setIdIsSet(true);
+        this.name = name;
+    }
 
-    private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
+    /** Performs a deep copy on <i>other</i>. */
+    public MyOperation(MyOperation other) {
+        __isset_bitfield = other.__isset_bitfield;
+        this.id = other.id;
+        if (other.isSetName()) {
+            this.name = other.name;
+        }
+    }
 
-    static {
-      for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
-        byName.put(field.getFieldName(), field);
-      }
+    private static <S extends org.apache.thrift.scheme.IScheme> S scheme(
+            org.apache.thrift.protocol.TProtocol proto) {
+        return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme())
+                        ? STANDARD_SCHEME_FACTORY
+                        : TUPLE_SCHEME_FACTORY)
+                .getScheme();
+    }
+
+    public MyOperation deepCopy() {
+        return new MyOperation(this);
+    }
+
+    @Override
+    public void clear() {
+        setIdIsSet(false);
+        this.id = 0;
+        this.name = null;
+    }
+
+    public int getId() {
+        return this.id;
+    }
+
+    public MyOperation setId(int id) {
+        this.id = id;
+        setIdIsSet(true);
+        return this;
+    }
+
+    public void unsetId() {
+        __isset_bitfield =
+                org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __ID_ISSET_ID);
+    }
+
+    /** Returns true if field id is set (has been assigned a value) and false otherwise */
+    public boolean isSetId() {
+        return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __ID_ISSET_ID);
+    }
+
+    public void setIdIsSet(boolean value) {
+        __isset_bitfield =
+                org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __ID_ISSET_ID, value);
     }
 
-    /**
-     * Find the _Fields constant that matches fieldId, or null if its not found.
-     */
     @org.apache.thrift.annotation.Nullable
-    public static _Fields findByThriftId(int fieldId) {
-      switch(fieldId) {
-        case 1: // ID
-          return ID;
-        case 2: // NAME
-          return NAME;
-        default:
-          return null;
-      }
+    public java.lang.String getName() {
+        return this.name;
     }
 
-    /**
-     * Find the _Fields constant that matches fieldId, throwing an exception
-     * if it is not found.
-     */
-    public static _Fields findByThriftIdOrThrow(int fieldId) {
-      _Fields fields = findByThriftId(fieldId);
-      if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
-      return fields;
+    public MyOperation setName(@org.apache.thrift.annotation.Nullable java.lang.String name) {
+        this.name = name;
+        return this;
+    }
+
+    public void unsetName() {
+        this.name = null;
+    }
+
+    /** Returns true if field name is set (has been assigned a value) and false otherwise */
+    public boolean isSetName() {
+        return this.name != null;
+    }
+
+    public void setNameIsSet(boolean value) {
+        if (!value) {
+            this.name = null;
+        }
+    }
+
+    public void setFieldValue(
+            _Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
+        switch (field) {
+            case ID:
+                if (value == null) {
+                    unsetId();
+                } else {
+                    setId((java.lang.Integer) value);
+                }
+                break;
+
+            case NAME:
+                if (value == null) {
+                    unsetName();
+                } else {
+                    setName((java.lang.String) value);
+                }
+                break;
+        }
+    }
+
+    @org.apache.thrift.annotation.Nullable
+    public java.lang.Object getFieldValue(_Fields field) {
+        switch (field) {
+            case ID:
+                return getId();
+
+            case NAME:
+                return getName();
+        }
+        throw new java.lang.IllegalStateException();
     }
 
     /**
-     * Find the _Fields constant that matches name, or null if its not found.
+     * Returns true if field corresponding to fieldID is set (has been assigned a value) and false
+     * otherwise
      */
-    @org.apache.thrift.annotation.Nullable
-    public static _Fields findByName(java.lang.String name) {
-      return byName.get(name);
-    }
-
-    private final short _thriftId;
-    private final java.lang.String _fieldName;
-
-    _Fields(short thriftId, java.lang.String fieldName) {
-      _thriftId = thriftId;
-      _fieldName = fieldName;
-    }
-
-    public short getThriftFieldId() {
-      return _thriftId;
-    }
-
-    public java.lang.String getFieldName() {
-      return _fieldName;
-    }
-  }
-
-  // isset id assignments
-  private static final int __ID_ISSET_ID = 0;
-  private byte __isset_bitfield = 0;
-  public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
-  static {
-    java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-    tmpMap.put(_Fields.ID, new org.apache.thrift.meta_data.FieldMetaData("id", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32        , "int")));
-    tmpMap.put(_Fields.NAME, new org.apache.thrift.meta_data.FieldMetaData("name", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
-    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(MyOperation.class, metaDataMap);
-  }
-
-  public MyOperation() {
-  }
-
-  public MyOperation(
-    int id,
-    java.lang.String name)
-  {
-    this();
-    this.id = id;
-    setIdIsSet(true);
-    this.name = name;
-  }
-
-  /**
-   * Performs a deep copy on <i>other</i>.
-   */
-  public MyOperation(MyOperation other) {
-    __isset_bitfield = other.__isset_bitfield;
-    this.id = other.id;
-    if (other.isSetName()) {
-      this.name = other.name;
-    }
-  }
-
-  public MyOperation deepCopy() {
-    return new MyOperation(this);
-  }
-
-  @Override
-  public void clear() {
-    setIdIsSet(false);
-    this.id = 0;
-    this.name = null;
-  }
-
-  public int getId() {
-    return this.id;
-  }
-
-  public MyOperation setId(int id) {
-    this.id = id;
-    setIdIsSet(true);
-    return this;
-  }
-
-  public void unsetId() {
-    __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __ID_ISSET_ID);
-  }
-
-  /** Returns true if field id is set (has been assigned a value) and false otherwise */
-  public boolean isSetId() {
-    return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __ID_ISSET_ID);
-  }
-
-  public void setIdIsSet(boolean value) {
-    __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __ID_ISSET_ID, value);
-  }
-
-  @org.apache.thrift.annotation.Nullable
-  public java.lang.String getName() {
-    return this.name;
-  }
-
-  public MyOperation setName(@org.apache.thrift.annotation.Nullable java.lang.String name) {
-    this.name = name;
-    return this;
-  }
-
-  public void unsetName() {
-    this.name = null;
-  }
-
-  /** Returns true if field name is set (has been assigned a value) and false otherwise */
-  public boolean isSetName() {
-    return this.name != null;
-  }
-
-  public void setNameIsSet(boolean value) {
-    if (!value) {
-      this.name = null;
-    }
-  }
-
-  public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
-    switch (field) {
-    case ID:
-      if (value == null) {
-        unsetId();
-      } else {
-        setId((java.lang.Integer)value);
-      }
-      break;
-
-    case NAME:
-      if (value == null) {
-        unsetName();
-      } else {
-        setName((java.lang.String)value);
-      }
-      break;
-
-    }
-  }
-
-  @org.apache.thrift.annotation.Nullable
-  public java.lang.Object getFieldValue(_Fields field) {
-    switch (field) {
-    case ID:
-      return getId();
-
-    case NAME:
-      return getName();
-
-    }
-    throw new java.lang.IllegalStateException();
-  }
-
-  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
-  public boolean isSet(_Fields field) {
-    if (field == null) {
-      throw new java.lang.IllegalArgumentException();
-    }
-
-    switch (field) {
-    case ID:
-      return isSetId();
-    case NAME:
-      return isSetName();
-    }
-    throw new java.lang.IllegalStateException();
-  }
-
-  @Override
-  public boolean equals(java.lang.Object that) {
-    if (that == null)
-      return false;
-    if (that instanceof MyOperation)
-      return this.equals((MyOperation)that);
-    return false;
-  }
-
-  public boolean equals(MyOperation that) {
-    if (that == null)
-      return false;
-    if (this == that)
-      return true;
-
-    boolean this_present_id = true;
-    boolean that_present_id = true;
-    if (this_present_id || that_present_id) {
-      if (!(this_present_id && that_present_id))
-        return false;
-      if (this.id != that.id)
-        return false;
+    public boolean isSet(_Fields field) {
+        if (field == null) {
+            throw new java.lang.IllegalArgumentException();
+        }
+
+        switch (field) {
+            case ID:
+                return isSetId();
+            case NAME:
+                return isSetName();
+        }
+        throw new java.lang.IllegalStateException();
     }
 
-    boolean this_present_name = true && this.isSetName();
-    boolean that_present_name = true && that.isSetName();
-    if (this_present_name || that_present_name) {
-      if (!(this_present_name && that_present_name))
-        return false;
-      if (!this.name.equals(that.name))
+    @Override
+    public boolean equals(java.lang.Object that) {
+        if (that == null) return false;
+        if (that instanceof MyOperation) return this.equals((MyOperation) that);
         return false;
     }
 
-    return true;
-  }
+    public boolean equals(MyOperation that) {
+        if (that == null) return false;
+        if (this == that) return true;
+
+        boolean this_present_id = true;
+        boolean that_present_id = true;
+        if (this_present_id || that_present_id) {
+            if (!(this_present_id && that_present_id)) return false;
+            if (this.id != that.id) return false;
+        }
 
-  @Override
-  public int hashCode() {
-    int hashCode = 1;
+        boolean this_present_name = true && this.isSetName();
+        boolean that_present_name = true && that.isSetName();
+        if (this_present_name || that_present_name) {
+            if (!(this_present_name && that_present_name)) return false;
+            if (!this.name.equals(that.name)) return false;
+        }
 
-    hashCode = hashCode * 8191 + id;
+        return true;
+    }
 
-    hashCode = hashCode * 8191 + ((isSetName()) ? 131071 : 524287);
-    if (isSetName())
-      hashCode = hashCode * 8191 + name.hashCode();
+    @Override
+    public int hashCode() {
+        int hashCode = 1;
+
+        hashCode = hashCode * 8191 + id;
+
+        hashCode = hashCode * 8191 + ((isSetName()) ? 131071 : 524287);
+        if (isSetName()) hashCode = hashCode * 8191 + name.hashCode();
+
+        return hashCode;
+    }
+
+    @Override
+    public int compareTo(MyOperation other) {
+        if (!getClass().equals(other.getClass())) {
+            return getClass().getName().compareTo(other.getClass().getName());
+        }
 
-    return hashCode;
-  }
+        int lastComparison = 0;
 
-  @Override
-  public int compareTo(MyOperation other) {
-    if (!getClass().equals(other.getClass())) {
-      return getClass().getName().compareTo(other.getClass().getName());
-    }
-
-    int lastComparison = 0;
-
-    lastComparison = java.lang.Boolean.valueOf(isSetId()).compareTo(other.isSetId());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetId()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id, other.id);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = java.lang.Boolean.valueOf(isSetName()).compareTo(other.isSetName());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetName()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.name, other.name);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    return 0;
-  }
-
-  @org.apache.thrift.annotation.Nullable
-  public _Fields fieldForId(int fieldId) {
-    return _Fields.findByThriftId(fieldId);
-  }
-
-  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
-    scheme(iprot).read(iprot, this);
-  }
-
-  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
-    scheme(oprot).write(oprot, this);
-  }
-
-  @Override
-  public java.lang.String toString() {
-    java.lang.StringBuilder sb = new java.lang.StringBuilder("MyOperation(");
-    boolean first = true;
-
-    sb.append("id:");
-    sb.append(this.id);
-    first = false;
-    if (!first) sb.append(", ");
-    sb.append("name:");
-    if (this.name == null) {
-      sb.append("null");
-    } else {
-      sb.append(this.name);
-    }
-    first = false;
-    sb.append(")");
-    return sb.toString();
-  }
-
-  public void validate() throws org.apache.thrift.TException {
-    // check for required fields
-    // check for sub-struct validity
-  }
-
-  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
-    try {
-      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
-    } catch (org.apache.thrift.TException te) {
-      throw new java.io.IOException(te);
-    }
-  }
-
-  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
-    try {
-      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
-      __isset_bitfield = 0;
-      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
-    } catch (org.apache.thrift.TException te) {
-      throw new java.io.IOException(te);
-    }
-  }
-
-  private static class MyOperationStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
-    public MyOperationStandardScheme getScheme() {
-      return new MyOperationStandardScheme();
-    }
-  }
-
-  private static class MyOperationStandardScheme extends org.apache.thrift.scheme.StandardScheme<MyOperation> {
-
-    public void read(org.apache.thrift.protocol.TProtocol iprot, MyOperation struct) throws org.apache.thrift.TException {
-      org.apache.thrift.protocol.TField schemeField;
-      iprot.readStructBegin();
-      while (true)
-      {
-        schemeField = iprot.readFieldBegin();
-        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
-          break;
+        lastComparison = java.lang.Boolean.valueOf(isSetId()).compareTo(other.isSetId());
+        if (lastComparison != 0) {
+            return lastComparison;
         }
-        switch (schemeField.id) {
-          case 1: // ID
-            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
-              struct.id = iprot.readI32();
-              struct.setIdIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        if (isSetId()) {
+            lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id, other.id);
+            if (lastComparison != 0) {
+                return lastComparison;
             }
-            break;
-          case 2: // NAME
-            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-              struct.name = iprot.readString();
-              struct.setNameIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        lastComparison = java.lang.Boolean.valueOf(isSetName()).compareTo(other.isSetName());
+        if (lastComparison != 0) {
+            return lastComparison;
+        }
+        if (isSetName()) {
+            lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.name, other.name);
+            if (lastComparison != 0) {
+                return lastComparison;
             }
-            break;
-          default:
-            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
         }
-        iprot.readFieldEnd();
-      }
-      iprot.readStructEnd();
+        return 0;
+    }
 
-      // check for required fields of primitive type, which can't be checked in the validate method
-      struct.validate();
+    @org.apache.thrift.annotation.Nullable
+    public _Fields fieldForId(int fieldId) {
+        return _Fields.findByThriftId(fieldId);
     }
 
-    public void write(org.apache.thrift.protocol.TProtocol oprot, MyOperation struct) throws org.apache.thrift.TException {
-      struct.validate();
+    public void read(org.apache.thrift.protocol.TProtocol iprot)
+            throws org.apache.thrift.TException {
+        scheme(iprot).read(iprot, this);
+    }
 
-      oprot.writeStructBegin(STRUCT_DESC);
-      oprot.writeFieldBegin(ID_FIELD_DESC);
-      oprot.writeI32(struct.id);
-      oprot.writeFieldEnd();
-      if (struct.name != null) {
-        oprot.writeFieldBegin(NAME_FIELD_DESC);
-        oprot.writeString(struct.name);
-        oprot.writeFieldEnd();
-      }
-      oprot.writeFieldStop();
-      oprot.writeStructEnd();
+    public void write(org.apache.thrift.protocol.TProtocol oprot)
+            throws org.apache.thrift.TException {
+        scheme(oprot).write(oprot, this);
     }
 
-  }
+    @Override
+    public java.lang.String toString() {
+        java.lang.StringBuilder sb = new java.lang.StringBuilder("MyOperation(");
+        boolean first = true;
+
+        sb.append("id:");
+        sb.append(this.id);
+        first = false;
+        if (!first) sb.append(", ");
+        sb.append("name:");
+        if (this.name == null) {
+            sb.append("null");
+        } else {
+            sb.append(this.name);
+        }
+        first = false;
+        sb.append(")");
+        return sb.toString();
+    }
 
-  private static class MyOperationTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
-    public MyOperationTupleScheme getScheme() {
-      return new MyOperationTupleScheme();
+    public void validate() throws org.apache.thrift.TException {
+        // check for required fields
+        // check for sub-struct validity
     }
-  }
 
-  private static class MyOperationTupleScheme extends org.apache.thrift.scheme.TupleScheme<MyOperation> {
+    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+        try {
+            write(
+                    new org.apache.thrift.protocol.TCompactProtocol(
+                            new org.apache.thrift.transport.TIOStreamTransport(out)));
+        } catch (org.apache.thrift.TException te) {
+            throw new java.io.IOException(te);
+        }
+    }
 
-    @Override
-    public void write(org.apache.thrift.protocol.TProtocol prot, MyOperation struct) throws org.apache.thrift.TException {
-      org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
-      java.util.BitSet optionals = new java.util.BitSet();
-      if (struct.isSetId()) {
-        optionals.set(0);
-      }
-      if (struct.isSetName()) {
-        optionals.set(1);
-      }
-      oprot.writeBitSet(optionals, 2);
-      if (struct.isSetId()) {
-        oprot.writeI32(struct.id);
-      }
-      if (struct.isSetName()) {
-        oprot.writeString(struct.name);
-      }
+    private void readObject(java.io.ObjectInputStream in)
+            throws java.io.IOException, java.lang.ClassNotFoundException {
+        try {
+            // it doesn't seem like you should have to do this, but java serialization is wacky, and
+            // doesn't call the default constructor.
+            __isset_bitfield = 0;
+            read(
+                    new org.apache.thrift.protocol.TCompactProtocol(
+                            new org.apache.thrift.transport.TIOStreamTransport(in)));
+        } catch (org.apache.thrift.TException te) {
+            throw new java.io.IOException(te);
+        }
     }
 
-    @Override
-    public void read(org.apache.thrift.protocol.TProtocol prot, MyOperation struct) throws org.apache.thrift.TException {
-      org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
-      java.util.BitSet incoming = iprot.readBitSet(2);
-      if (incoming.get(0)) {
-        struct.id = iprot.readI32();
-        struct.setIdIsSet(true);
-      }
-      if (incoming.get(1)) {
-        struct.name = iprot.readString();
-        struct.setNameIsSet(true);
-      }
-    }
-  }
-
-  private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
-    return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
-  }
-}
+    /**
+     * The set of fields this struct contains, along with convenience methods for finding and
+     * manipulating them.
+     */
+    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+        ID((short) 1, "id"),
+        NAME((short) 2, "name");
+
+        private static final java.util.Map<java.lang.String, _Fields> byName =
+                new java.util.HashMap<java.lang.String, _Fields>();
+
+        static {
+            for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
+                byName.put(field.getFieldName(), field);
+            }
+        }
+
+        private final short _thriftId;
+        private final java.lang.String _fieldName;
+
+        _Fields(short thriftId, java.lang.String fieldName) {
+            _thriftId = thriftId;
+            _fieldName = fieldName;
+        }
+
+        /** Find the _Fields constant that matches fieldId, or null if it's not found. */
+        @org.apache.thrift.annotation.Nullable
+        public static _Fields findByThriftId(int fieldId) {
+            switch (fieldId) {
+                case 1: // ID
+                    return ID;
+                case 2: // NAME
+                    return NAME;
+                default:
+                    return null;
+            }
+        }
+
+        /**
+         * Find the _Fields constant that matches fieldId, throwing an exception if it is not found.
+         */
+        public static _Fields findByThriftIdOrThrow(int fieldId) {
+            _Fields fields = findByThriftId(fieldId);
+            if (fields == null)
+                throw new java.lang.IllegalArgumentException(
+                        "Field " + fieldId + " doesn't exist!");
+            return fields;
+        }
+
+        /** Find the _Fields constant that matches name, or null if it's not found. */
+        @org.apache.thrift.annotation.Nullable
+        public static _Fields findByName(java.lang.String name) {
+            return byName.get(name);
+        }
 
+        public short getThriftFieldId() {
+            return _thriftId;
+        }
+
+        public java.lang.String getFieldName() {
+            return _fieldName;
+        }
+    }
+
+    private static class MyOperationStandardSchemeFactory
+            implements org.apache.thrift.scheme.SchemeFactory {
+        public MyOperationStandardScheme getScheme() {
+            return new MyOperationStandardScheme();
+        }
+    }
+
+    private static class MyOperationStandardScheme
+            extends org.apache.thrift.scheme.StandardScheme<MyOperation> {
+
+        public void read(org.apache.thrift.protocol.TProtocol iprot, MyOperation struct)
+                throws org.apache.thrift.TException {
+            org.apache.thrift.protocol.TField schemeField;
+            iprot.readStructBegin();
+            while (true) {
+                schemeField = iprot.readFieldBegin();
+                if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+                    break;
+                }
+                switch (schemeField.id) {
+                    case 1: // ID
+                        if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+                            struct.id = iprot.readI32();
+                            struct.setIdIsSet(true);
+                        } else {
+                            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+                        }
+                        break;
+                    case 2: // NAME
+                        if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+                            struct.name = iprot.readString();
+                            struct.setNameIsSet(true);
+                        } else {
+                            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+                        }
+                        break;
+                    default:
+                        org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+                }
+                iprot.readFieldEnd();
+            }
+            iprot.readStructEnd();
+
+            // check for required fields of primitive type, which can't be checked in the validate
+            // method
+            struct.validate();
+        }
+
+        public void write(org.apache.thrift.protocol.TProtocol oprot, MyOperation struct)
+                throws org.apache.thrift.TException {
+            struct.validate();
+
+            oprot.writeStructBegin(STRUCT_DESC);
+            oprot.writeFieldBegin(ID_FIELD_DESC);
+            oprot.writeI32(struct.id);
+            oprot.writeFieldEnd();
+            if (struct.name != null) {
+                oprot.writeFieldBegin(NAME_FIELD_DESC);
+                oprot.writeString(struct.name);
+                oprot.writeFieldEnd();
+            }
+            oprot.writeFieldStop();
+            oprot.writeStructEnd();
+        }
+    }
+
+    private static class MyOperationTupleSchemeFactory
+            implements org.apache.thrift.scheme.SchemeFactory {
+        public MyOperationTupleScheme getScheme() {
+            return new MyOperationTupleScheme();
+        }
+    }
+
+    private static class MyOperationTupleScheme
+            extends org.apache.thrift.scheme.TupleScheme<MyOperation> {
+
+        @Override
+        public void write(org.apache.thrift.protocol.TProtocol prot, MyOperation struct)
+                throws org.apache.thrift.TException {
+            org.apache.thrift.protocol.TTupleProtocol oprot =
+                    (org.apache.thrift.protocol.TTupleProtocol) prot;
+            java.util.BitSet optionals = new java.util.BitSet();
+            if (struct.isSetId()) {
+                optionals.set(0);
+            }
+            if (struct.isSetName()) {
+                optionals.set(1);
+            }
+            oprot.writeBitSet(optionals, 2);
+            if (struct.isSetId()) {
+                oprot.writeI32(struct.id);
+            }
+            if (struct.isSetName()) {
+                oprot.writeString(struct.name);
+            }
+        }
+
+        @Override
+        public void read(org.apache.thrift.protocol.TProtocol prot, MyOperation struct)
+                throws org.apache.thrift.TException {
+            org.apache.thrift.protocol.TTupleProtocol iprot =
+                    (org.apache.thrift.protocol.TTupleProtocol) prot;
+            java.util.BitSet incoming = iprot.readBitSet(2);
+            if (incoming.get(0)) {
+                struct.id = iprot.readI32();
+                struct.setIdIsSet(true);
+            }
+            if (incoming.get(1)) {
+                struct.name = iprot.readString();
+                struct.setNameIsSet(true);
+            }
+        }
+    }
+}
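
For reference, a minimal round-trip of the generated struct above through the compact
protocol, the same protocol its writeObject/readObject hooks use for plain Java
serialization. This is only a sketch, assuming libthrift 0.13 on the classpath; the
class name ThriftRoundTripSketch is illustrative and not part of this commit:

import org.apache.flink.benchmark.thrift.MyOperation;
import org.apache.thrift.TDeserializer;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
import org.apache.thrift.protocol.TCompactProtocol;

public class ThriftRoundTripSketch {
    public static void main(String[] args) throws TException {
        MyOperation op = new MyOperation().setId(42).setName("noop");

        // Serialize with the compact protocol.
        TSerializer serializer = new TSerializer(new TCompactProtocol.Factory());
        byte[] bytes = serializer.serialize(op);

        // Deserialize into a fresh instance; the generated equals() compares
        // the primitive id field directly and the name field via isSetName().
        MyOperation copy = new MyOperation();
        new TDeserializer(new TCompactProtocol.Factory()).deserialize(copy, bytes);
        if (!op.equals(copy) || !copy.isSetId()) {
            throw new AssertionError("round-trip mismatch");
        }
    }
}

TSerializer and TDeserializer end up in the scheme() dispatch shown above: a
TTupleProtocol selects the denser TupleScheme, while field-tagged protocols such as
TCompactProtocol go through the StandardScheme.
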
diff --git a/src/main/java/org/apache/flink/benchmark/thrift/MyPojo.java b/src/main/java/org/apache/flink/benchmark/thrift/MyPojo.java
index c56c435..9c0c75b 100644
--- a/src/main/java/org/apache/flink/benchmark/thrift/MyPojo.java
+++ b/src/main/java/org/apache/flink/benchmark/thrift/MyPojo.java
@@ -1,1188 +1,1286 @@
 /**
  * Autogenerated by Thrift Compiler (0.13.0)
  *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- *  @generated
+ * <p>DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *
+ * @generated
  */
 package org.apache.flink.benchmark.thrift;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-03-06")
-public class MyPojo implements org.apache.thrift.TBase<MyPojo, MyPojo._Fields>, java.io.Serializable, Cloneable, Comparable<MyPojo> {
-  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("MyPojo");
-
-  private static final org.apache.thrift.protocol.TField ID_FIELD_DESC = new org.apache.thrift.protocol.TField("id", org.apache.thrift.protocol.TType.I32, (short)1);
-  private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("name", org.apache.thrift.protocol.TType.STRING, (short)2);
-  private static final org.apache.thrift.protocol.TField OPERATION_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("operationName", org.apache.thrift.protocol.TType.LIST, (short)3);
-  private static final org.apache.thrift.protocol.TField OPERATIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("operations", org.apache.thrift.protocol.TType.LIST, (short)4);
-  private static final org.apache.thrift.protocol.TField OTHER_ID1_FIELD_DESC = new org.apache.thrift.protocol.TField("otherId1", org.apache.thrift.protocol.TType.I32, (short)5);
-  private static final org.apache.thrift.protocol.TField OTHER_ID2_FIELD_DESC = new org.apache.thrift.protocol.TField("otherId2", org.apache.thrift.protocol.TType.I32, (short)6);
-  private static final org.apache.thrift.protocol.TField OTHER_ID3_FIELD_DESC = new org.apache.thrift.protocol.TField("otherId3", org.apache.thrift.protocol.TType.I32, (short)7);
-  private static final org.apache.thrift.protocol.TField SOME_OBJECT_FIELD_DESC = new org.apache.thrift.protocol.TField("someObject", org.apache.thrift.protocol.TType.STRING, (short)8);
-
-  private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new MyPojoStandardSchemeFactory();
-  private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new MyPojoTupleSchemeFactory();
-
-  public int id; // required
-  public @org.apache.thrift.annotation.Nullable java.lang.String name; // required
-  public @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> operationName; // required
-  public @org.apache.thrift.annotation.Nullable java.util.List<MyOperation> operations; // required
-  public int otherId1; // required
-  public int otherId2; // required
-  public int otherId3; // required
-  public @org.apache.thrift.annotation.Nullable java.lang.String someObject; // optional
-
-  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
-  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
-    ID((short)1, "id"),
-    NAME((short)2, "name"),
-    OPERATION_NAME((short)3, "operationName"),
-    OPERATIONS((short)4, "operations"),
-    OTHER_ID1((short)5, "otherId1"),
-    OTHER_ID2((short)6, "otherId2"),
-    OTHER_ID3((short)7, "otherId3"),
-    SOME_OBJECT((short)8, "someObject");
-
-    private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
+@javax.annotation.Generated(
+        value = "Autogenerated by Thrift Compiler (0.13.0)",
+        date = "2020-03-06")
+public class MyPojo
+        implements org.apache.thrift.TBase<MyPojo, MyPojo._Fields>,
+                java.io.Serializable,
+                Cloneable,
+                Comparable<MyPojo> {
+    public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData>
+            metaDataMap;
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC =
+            new org.apache.thrift.protocol.TStruct("MyPojo");
+    private static final org.apache.thrift.protocol.TField ID_FIELD_DESC =
+            new org.apache.thrift.protocol.TField(
+                    "id", org.apache.thrift.protocol.TType.I32, (short) 1);
+    private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC =
+            new org.apache.thrift.protocol.TField(
+                    "name", org.apache.thrift.protocol.TType.STRING, (short) 2);
+    private static final org.apache.thrift.protocol.TField OPERATION_NAME_FIELD_DESC =
+            new org.apache.thrift.protocol.TField(
+                    "operationName", org.apache.thrift.protocol.TType.LIST, (short) 3);
+    private static final org.apache.thrift.protocol.TField OPERATIONS_FIELD_DESC =
+            new org.apache.thrift.protocol.TField(
+                    "operations", org.apache.thrift.protocol.TType.LIST, (short) 4);
+    private static final org.apache.thrift.protocol.TField OTHER_ID1_FIELD_DESC =
+            new org.apache.thrift.protocol.TField(
+                    "otherId1", org.apache.thrift.protocol.TType.I32, (short) 5);
+    private static final org.apache.thrift.protocol.TField OTHER_ID2_FIELD_DESC =
+            new org.apache.thrift.protocol.TField(
+                    "otherId2", org.apache.thrift.protocol.TType.I32, (short) 6);
+    private static final org.apache.thrift.protocol.TField OTHER_ID3_FIELD_DESC =
+            new org.apache.thrift.protocol.TField(
+                    "otherId3", org.apache.thrift.protocol.TType.I32, (short) 7);
+    private static final org.apache.thrift.protocol.TField SOME_OBJECT_FIELD_DESC =
+            new org.apache.thrift.protocol.TField(
+                    "someObject", org.apache.thrift.protocol.TType.STRING, (short) 8);
+    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY =
+            new MyPojoStandardSchemeFactory();
+    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY =
+            new MyPojoTupleSchemeFactory();
+    // isset id assignments
+    private static final int __ID_ISSET_ID = 0;
+    private static final int __OTHERID1_ISSET_ID = 1;
+    private static final int __OTHERID2_ISSET_ID = 2;
+    private static final int __OTHERID3_ISSET_ID = 3;
+    private static final _Fields optionals[] = {_Fields.SOME_OBJECT};
 
     static {
-      for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
-        byName.put(field.getFieldName(), field);
-      }
+        java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap =
+                new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(
+                        _Fields.class);
+        tmpMap.put(
+                _Fields.ID,
+                new org.apache.thrift.meta_data.FieldMetaData(
+                        "id",
+                        org.apache.thrift.TFieldRequirementType.DEFAULT,
+                        new org.apache.thrift.meta_data.FieldValueMetaData(
+                                org.apache.thrift.protocol.TType.I32, "int")));
+        tmpMap.put(
+                _Fields.NAME,
+                new org.apache.thrift.meta_data.FieldMetaData(
+                        "name",
+                        org.apache.thrift.TFieldRequirementType.DEFAULT,
+                        new org.apache.thrift.meta_data.FieldValueMetaData(
+                                org.apache.thrift.protocol.TType.STRING)));
+        tmpMap.put(
+                _Fields.OPERATION_NAME,
+                new org.apache.thrift.meta_data.FieldMetaData(
+                        "operationName",
+                        org.apache.thrift.TFieldRequirementType.DEFAULT,
+                        new org.apache.thrift.meta_data.ListMetaData(
+                                org.apache.thrift.protocol.TType.LIST,
+                                new org.apache.thrift.meta_data.FieldValueMetaData(
+                                        org.apache.thrift.protocol.TType.STRING))));
+        tmpMap.put(
+                _Fields.OPERATIONS,
+                new org.apache.thrift.meta_data.FieldMetaData(
+                        "operations",
+                        org.apache.thrift.TFieldRequirementType.DEFAULT,
+                        new org.apache.thrift.meta_data.ListMetaData(
+                                org.apache.thrift.protocol.TType.LIST,
+                                new org.apache.thrift.meta_data.FieldValueMetaData(
+                                        org.apache.thrift.protocol.TType.STRUCT, "MyOperation"))));
+        tmpMap.put(
+                _Fields.OTHER_ID1,
+                new org.apache.thrift.meta_data.FieldMetaData(
+                        "otherId1",
+                        org.apache.thrift.TFieldRequirementType.DEFAULT,
+                        new org.apache.thrift.meta_data.FieldValueMetaData(
+                                org.apache.thrift.protocol.TType.I32, "int")));
+        tmpMap.put(
+                _Fields.OTHER_ID2,
+                new org.apache.thrift.meta_data.FieldMetaData(
+                        "otherId2",
+                        org.apache.thrift.TFieldRequirementType.DEFAULT,
+                        new org.apache.thrift.meta_data.FieldValueMetaData(
+                                org.apache.thrift.protocol.TType.I32, "int")));
+        tmpMap.put(
+                _Fields.OTHER_ID3,
+                new org.apache.thrift.meta_data.FieldMetaData(
+                        "otherId3",
+                        org.apache.thrift.TFieldRequirementType.DEFAULT,
+                        new org.apache.thrift.meta_data.FieldValueMetaData(
+                                org.apache.thrift.protocol.TType.I32, "int")));
+        tmpMap.put(
+                _Fields.SOME_OBJECT,
+                new org.apache.thrift.meta_data.FieldMetaData(
+                        "someObject",
+                        org.apache.thrift.TFieldRequirementType.OPTIONAL,
+                        new org.apache.thrift.meta_data.FieldValueMetaData(
+                                org.apache.thrift.protocol.TType.STRING)));
+        metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
+        org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(MyPojo.class, metaDataMap);
+    }
+
+    public int id; // required
+    public @org.apache.thrift.annotation.Nullable java.lang.String name; // required
+    public @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String>
+            operationName; // required
+    public @org.apache.thrift.annotation.Nullable java.util.List<MyOperation>
+            operations; // required
+    public int otherId1; // required
+    public int otherId2; // required
+    public int otherId3; // required
+    public @org.apache.thrift.annotation.Nullable java.lang.String someObject; // optional
+    private byte __isset_bitfield = 0;
+
+    public MyPojo() {}
+
+    public MyPojo(
+            int id,
+            java.lang.String name,
+            java.util.List<java.lang.String> operationName,
+            java.util.List<MyOperation> operations,
+            int otherId1,
+            int otherId2,
+            int otherId3) {
+        this();
+        this.id = id;
+        setIdIsSet(true);
+        this.name = name;
+        this.operationName = operationName;
+        this.operations = operations;
+        this.otherId1 = otherId1;
+        setOtherId1IsSet(true);
+        this.otherId2 = otherId2;
+        setOtherId2IsSet(true);
+        this.otherId3 = otherId3;
+        setOtherId3IsSet(true);
+    }
+
+    /** Performs a deep copy on <i>other</i>. */
+    public MyPojo(MyPojo other) {
+        __isset_bitfield = other.__isset_bitfield;
+        this.id = other.id;
+        if (other.isSetName()) {
+            this.name = other.name;
+        }
+        if (other.isSetOperationName()) {
+            java.util.List<java.lang.String> __this__operationName =
+                    new java.util.ArrayList<java.lang.String>(other.operationName);
+            this.operationName = __this__operationName;
+        }
+        if (other.isSetOperations()) {
+            java.util.List<MyOperation> __this__operations =
+                    new java.util.ArrayList<MyOperation>(other.operations.size());
+            for (MyOperation other_element : other.operations) {
+                __this__operations.add(new MyOperation(other_element));
+            }
+            this.operations = __this__operations;
+        }
+        this.otherId1 = other.otherId1;
+        this.otherId2 = other.otherId2;
+        this.otherId3 = other.otherId3;
+        if (other.isSetSomeObject()) {
+            this.someObject = other.someObject;
+        }
+    }
+
+    private static <S extends org.apache.thrift.scheme.IScheme> S scheme(
+            org.apache.thrift.protocol.TProtocol proto) {
+        return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme())
+                        ? STANDARD_SCHEME_FACTORY
+                        : TUPLE_SCHEME_FACTORY)
+                .getScheme();
+    }
+
+    public MyPojo deepCopy() {
+        return new MyPojo(this);
+    }
+
+    @Override
+    public void clear() {
+        setIdIsSet(false);
+        this.id = 0;
+        this.name = null;
+        this.operationName = null;
+        this.operations = null;
+        setOtherId1IsSet(false);
+        this.otherId1 = 0;
+        setOtherId2IsSet(false);
+        this.otherId2 = 0;
+        setOtherId3IsSet(false);
+        this.otherId3 = 0;
+        this.someObject = null;
+    }
+
+    public int getId() {
+        return this.id;
+    }
+
+    public MyPojo setId(int id) {
+        this.id = id;
+        setIdIsSet(true);
+        return this;
+    }
+
+    public void unsetId() {
+        __isset_bitfield =
+                org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __ID_ISSET_ID);
+    }
+
+    /** Returns true if field id is set (has been assigned a value) and false otherwise */
+    public boolean isSetId() {
+        return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __ID_ISSET_ID);
+    }
+
+    public void setIdIsSet(boolean value) {
+        __isset_bitfield =
+                org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __ID_ISSET_ID, value);
     }
 
-    /**
-     * Find the _Fields constant that matches fieldId, or null if its not found.
-     */
     @org.apache.thrift.annotation.Nullable
-    public static _Fields findByThriftId(int fieldId) {
-      switch(fieldId) {
-        case 1: // ID
-          return ID;
-        case 2: // NAME
-          return NAME;
-        case 3: // OPERATION_NAME
-          return OPERATION_NAME;
-        case 4: // OPERATIONS
-          return OPERATIONS;
-        case 5: // OTHER_ID1
-          return OTHER_ID1;
-        case 6: // OTHER_ID2
-          return OTHER_ID2;
-        case 7: // OTHER_ID3
-          return OTHER_ID3;
-        case 8: // SOME_OBJECT
-          return SOME_OBJECT;
-        default:
-          return null;
-      }
+    public java.lang.String getName() {
+        return this.name;
     }
 
-    /**
-     * Find the _Fields constant that matches fieldId, throwing an exception
-     * if it is not found.
-     */
-    public static _Fields findByThriftIdOrThrow(int fieldId) {
-      _Fields fields = findByThriftId(fieldId);
-      if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
-      return fields;
+    public MyPojo setName(@org.apache.thrift.annotation.Nullable java.lang.String name) {
+        this.name = name;
+        return this;
+    }
+
+    public void unsetName() {
+        this.name = null;
+    }
+
+    /** Returns true if field name is set (has been assigned a value) and false otherwise */
+    public boolean isSetName() {
+        return this.name != null;
+    }
+
+    public void setNameIsSet(boolean value) {
+        if (!value) {
+            this.name = null;
+        }
+    }
+
+    public int getOperationNameSize() {
+        return (this.operationName == null) ? 0 : this.operationName.size();
+    }
+
+    @org.apache.thrift.annotation.Nullable
+    public java.util.Iterator<java.lang.String> getOperationNameIterator() {
+        return (this.operationName == null) ? null : this.operationName.iterator();
+    }
+
+    public void addToOperationName(java.lang.String elem) {
+        if (this.operationName == null) {
+            this.operationName = new java.util.ArrayList<java.lang.String>();
+        }
+        this.operationName.add(elem);
+    }
+
+    @org.apache.thrift.annotation.Nullable
+    public java.util.List<java.lang.String> getOperationName() {
+        return this.operationName;
+    }
+
+    public MyPojo setOperationName(
+            @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> operationName) {
+        this.operationName = operationName;
+        return this;
+    }
+
+    public void unsetOperationName() {
+        this.operationName = null;
     }
 
     /**
-     * Find the _Fields constant that matches name, or null if its not found.
+     * Returns true if field operationName is set (has been assigned a value) and false otherwise
      */
+    public boolean isSetOperationName() {
+        return this.operationName != null;
+    }
+
+    public void setOperationNameIsSet(boolean value) {
+        if (!value) {
+            this.operationName = null;
+        }
+    }
+
+    public int getOperationsSize() {
+        return (this.operations == null) ? 0 : this.operations.size();
+    }
+
     @org.apache.thrift.annotation.Nullable
-    public static _Fields findByName(java.lang.String name) {
-      return byName.get(name);
-    }
-
-    private final short _thriftId;
-    private final java.lang.String _fieldName;
-
-    _Fields(short thriftId, java.lang.String fieldName) {
-      _thriftId = thriftId;
-      _fieldName = fieldName;
-    }
-
-    public short getThriftFieldId() {
-      return _thriftId;
-    }
-
-    public java.lang.String getFieldName() {
-      return _fieldName;
-    }
-  }
-
-  // isset id assignments
-  private static final int __ID_ISSET_ID = 0;
-  private static final int __OTHERID1_ISSET_ID = 1;
-  private static final int __OTHERID2_ISSET_ID = 2;
-  private static final int __OTHERID3_ISSET_ID = 3;
-  private byte __isset_bitfield = 0;
-  private static final _Fields optionals[] = {_Fields.SOME_OBJECT};
-  public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
-  static {
-    java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-    tmpMap.put(_Fields.ID, new org.apache.thrift.meta_data.FieldMetaData("id", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32        , "int")));
-    tmpMap.put(_Fields.NAME, new org.apache.thrift.meta_data.FieldMetaData("name", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    tmpMap.put(_Fields.OPERATION_NAME, new org.apache.thrift.meta_data.FieldMetaData("operationName", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
-            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
-    tmpMap.put(_Fields.OPERATIONS, new org.apache.thrift.meta_data.FieldMetaData("operations", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
-            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT            , "MyOperation"))));
-    tmpMap.put(_Fields.OTHER_ID1, new org.apache.thrift.meta_data.FieldMetaData("otherId1", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32        , "int")));
-    tmpMap.put(_Fields.OTHER_ID2, new org.apache.thrift.meta_data.FieldMetaData("otherId2", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32        , "int")));
-    tmpMap.put(_Fields.OTHER_ID3, new org.apache.thrift.meta_data.FieldMetaData("otherId3", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32        , "int")));
-    tmpMap.put(_Fields.SOME_OBJECT, new org.apache.thrift.meta_data.FieldMetaData("someObject", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
-    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(MyPojo.class, metaDataMap);
-  }
-
-  public MyPojo() {
-  }
-
-  public MyPojo(
-    int id,
-    java.lang.String name,
-    java.util.List<java.lang.String> operationName,
-    java.util.List<MyOperation> operations,
-    int otherId1,
-    int otherId2,
-    int otherId3)
-  {
-    this();
-    this.id = id;
-    setIdIsSet(true);
-    this.name = name;
-    this.operationName = operationName;
-    this.operations = operations;
-    this.otherId1 = otherId1;
-    setOtherId1IsSet(true);
-    this.otherId2 = otherId2;
-    setOtherId2IsSet(true);
-    this.otherId3 = otherId3;
-    setOtherId3IsSet(true);
-  }
-
-  /**
-   * Performs a deep copy on <i>other</i>.
-   */
-  public MyPojo(MyPojo other) {
-    __isset_bitfield = other.__isset_bitfield;
-    this.id = other.id;
-    if (other.isSetName()) {
-      this.name = other.name;
-    }
-    if (other.isSetOperationName()) {
-      java.util.List<java.lang.String> __this__operationName = new java.util.ArrayList<java.lang.String>(other.operationName);
-      this.operationName = __this__operationName;
-    }
-    if (other.isSetOperations()) {
-      java.util.List<MyOperation> __this__operations = new java.util.ArrayList<MyOperation>(other.operations.size());
-      for (MyOperation other_element : other.operations) {
-        __this__operations.add(new MyOperation(other_element));
-      }
-      this.operations = __this__operations;
-    }
-    this.otherId1 = other.otherId1;
-    this.otherId2 = other.otherId2;
-    this.otherId3 = other.otherId3;
-    if (other.isSetSomeObject()) {
-      this.someObject = other.someObject;
-    }
-  }
-
-  public MyPojo deepCopy() {
-    return new MyPojo(this);
-  }
-
-  @Override
-  public void clear() {
-    setIdIsSet(false);
-    this.id = 0;
-    this.name = null;
-    this.operationName = null;
-    this.operations = null;
-    setOtherId1IsSet(false);
-    this.otherId1 = 0;
-    setOtherId2IsSet(false);
-    this.otherId2 = 0;
-    setOtherId3IsSet(false);
-    this.otherId3 = 0;
-    this.someObject = null;
-  }
-
-  public int getId() {
-    return this.id;
-  }
-
-  public MyPojo setId(int id) {
-    this.id = id;
-    setIdIsSet(true);
-    return this;
-  }
-
-  public void unsetId() {
-    __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __ID_ISSET_ID);
-  }
-
-  /** Returns true if field id is set (has been assigned a value) and false otherwise */
-  public boolean isSetId() {
-    return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __ID_ISSET_ID);
-  }
-
-  public void setIdIsSet(boolean value) {
-    __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __ID_ISSET_ID, value);
-  }
-
-  @org.apache.thrift.annotation.Nullable
-  public java.lang.String getName() {
-    return this.name;
-  }
-
-  public MyPojo setName(@org.apache.thrift.annotation.Nullable java.lang.String name) {
-    this.name = name;
-    return this;
-  }
-
-  public void unsetName() {
-    this.name = null;
-  }
-
-  /** Returns true if field name is set (has been assigned a value) and false otherwise */
-  public boolean isSetName() {
-    return this.name != null;
-  }
-
-  public void setNameIsSet(boolean value) {
-    if (!value) {
-      this.name = null;
-    }
-  }
-
-  public int getOperationNameSize() {
-    return (this.operationName == null) ? 0 : this.operationName.size();
-  }
-
-  @org.apache.thrift.annotation.Nullable
-  public java.util.Iterator<java.lang.String> getOperationNameIterator() {
-    return (this.operationName == null) ? null : this.operationName.iterator();
-  }
-
-  public void addToOperationName(java.lang.String elem) {
-    if (this.operationName == null) {
-      this.operationName = new java.util.ArrayList<java.lang.String>();
-    }
-    this.operationName.add(elem);
-  }
-
-  @org.apache.thrift.annotation.Nullable
-  public java.util.List<java.lang.String> getOperationName() {
-    return this.operationName;
-  }
-
-  public MyPojo setOperationName(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> operationName) {
-    this.operationName = operationName;
-    return this;
-  }
-
-  public void unsetOperationName() {
-    this.operationName = null;
-  }
-
-  /** Returns true if field operationName is set (has been assigned a value) and false otherwise */
-  public boolean isSetOperationName() {
-    return this.operationName != null;
-  }
-
-  public void setOperationNameIsSet(boolean value) {
-    if (!value) {
-      this.operationName = null;
-    }
-  }
-
-  public int getOperationsSize() {
-    return (this.operations == null) ? 0 : this.operations.size();
-  }
-
-  @org.apache.thrift.annotation.Nullable
-  public java.util.Iterator<MyOperation> getOperationsIterator() {
-    return (this.operations == null) ? null : this.operations.iterator();
-  }
-
-  public void addToOperations(MyOperation elem) {
-    if (this.operations == null) {
-      this.operations = new java.util.ArrayList<MyOperation>();
-    }
-    this.operations.add(elem);
-  }
-
-  @org.apache.thrift.annotation.Nullable
-  public java.util.List<MyOperation> getOperations() {
-    return this.operations;
-  }
-
-  public MyPojo setOperations(@org.apache.thrift.annotation.Nullable java.util.List<MyOperation> operations) {
-    this.operations = operations;
-    return this;
-  }
-
-  public void unsetOperations() {
-    this.operations = null;
-  }
-
-  /** Returns true if field operations is set (has been assigned a value) and false otherwise */
-  public boolean isSetOperations() {
-    return this.operations != null;
-  }
-
-  public void setOperationsIsSet(boolean value) {
-    if (!value) {
-      this.operations = null;
-    }
-  }
-
-  public int getOtherId1() {
-    return this.otherId1;
-  }
-
-  public MyPojo setOtherId1(int otherId1) {
-    this.otherId1 = otherId1;
-    setOtherId1IsSet(true);
-    return this;
-  }
-
-  public void unsetOtherId1() {
-    __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __OTHERID1_ISSET_ID);
-  }
-
-  /** Returns true if field otherId1 is set (has been assigned a value) and false otherwise */
-  public boolean isSetOtherId1() {
-    return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __OTHERID1_ISSET_ID);
-  }
-
-  public void setOtherId1IsSet(boolean value) {
-    __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __OTHERID1_ISSET_ID, value);
-  }
-
-  public int getOtherId2() {
-    return this.otherId2;
-  }
-
-  public MyPojo setOtherId2(int otherId2) {
-    this.otherId2 = otherId2;
-    setOtherId2IsSet(true);
-    return this;
-  }
-
-  public void unsetOtherId2() {
-    __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __OTHERID2_ISSET_ID);
-  }
-
-  /** Returns true if field otherId2 is set (has been assigned a value) and false otherwise */
-  public boolean isSetOtherId2() {
-    return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __OTHERID2_ISSET_ID);
-  }
-
-  public void setOtherId2IsSet(boolean value) {
-    __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __OTHERID2_ISSET_ID, value);
-  }
-
-  public int getOtherId3() {
-    return this.otherId3;
-  }
-
-  public MyPojo setOtherId3(int otherId3) {
-    this.otherId3 = otherId3;
-    setOtherId3IsSet(true);
-    return this;
-  }
-
-  public void unsetOtherId3() {
-    __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __OTHERID3_ISSET_ID);
-  }
-
-  /** Returns true if field otherId3 is set (has been assigned a value) and false otherwise */
-  public boolean isSetOtherId3() {
-    return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __OTHERID3_ISSET_ID);
-  }
-
-  public void setOtherId3IsSet(boolean value) {
-    __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __OTHERID3_ISSET_ID, value);
-  }
-
-  @org.apache.thrift.annotation.Nullable
-  public java.lang.String getSomeObject() {
-    return this.someObject;
-  }
-
-  public MyPojo setSomeObject(@org.apache.thrift.annotation.Nullable java.lang.String someObject) {
-    this.someObject = someObject;
-    return this;
-  }
-
-  public void unsetSomeObject() {
-    this.someObject = null;
-  }
-
-  /** Returns true if field someObject is set (has been assigned a value) and false otherwise */
-  public boolean isSetSomeObject() {
-    return this.someObject != null;
-  }
-
-  public void setSomeObjectIsSet(boolean value) {
-    if (!value) {
-      this.someObject = null;
-    }
-  }
-
-  public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
-    switch (field) {
-    case ID:
-      if (value == null) {
-        unsetId();
-      } else {
-        setId((java.lang.Integer)value);
-      }
-      break;
-
-    case NAME:
-      if (value == null) {
-        unsetName();
-      } else {
-        setName((java.lang.String)value);
-      }
-      break;
-
-    case OPERATION_NAME:
-      if (value == null) {
-        unsetOperationName();
-      } else {
-        setOperationName((java.util.List<java.lang.String>)value);
-      }
-      break;
-
-    case OPERATIONS:
-      if (value == null) {
-        unsetOperations();
-      } else {
-        setOperations((java.util.List<MyOperation>)value);
-      }
-      break;
-
-    case OTHER_ID1:
-      if (value == null) {
-        unsetOtherId1();
-      } else {
-        setOtherId1((java.lang.Integer)value);
-      }
-      break;
-
-    case OTHER_ID2:
-      if (value == null) {
-        unsetOtherId2();
-      } else {
-        setOtherId2((java.lang.Integer)value);
-      }
-      break;
-
-    case OTHER_ID3:
-      if (value == null) {
-        unsetOtherId3();
-      } else {
-        setOtherId3((java.lang.Integer)value);
-      }
-      break;
-
-    case SOME_OBJECT:
-      if (value == null) {
-        unsetSomeObject();
-      } else {
-        setSomeObject((java.lang.String)value);
-      }
-      break;
-
-    }
-  }
-
-  @org.apache.thrift.annotation.Nullable
-  public java.lang.Object getFieldValue(_Fields field) {
-    switch (field) {
-    case ID:
-      return getId();
-
-    case NAME:
-      return getName();
-
-    case OPERATION_NAME:
-      return getOperationName();
-
-    case OPERATIONS:
-      return getOperations();
-
-    case OTHER_ID1:
-      return getOtherId1();
-
-    case OTHER_ID2:
-      return getOtherId2();
-
-    case OTHER_ID3:
-      return getOtherId3();
-
-    case SOME_OBJECT:
-      return getSomeObject();
-
-    }
-    throw new java.lang.IllegalStateException();
-  }
-
-  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
-  public boolean isSet(_Fields field) {
-    if (field == null) {
-      throw new java.lang.IllegalArgumentException();
-    }
-
-    switch (field) {
-    case ID:
-      return isSetId();
-    case NAME:
-      return isSetName();
-    case OPERATION_NAME:
-      return isSetOperationName();
-    case OPERATIONS:
-      return isSetOperations();
-    case OTHER_ID1:
-      return isSetOtherId1();
-    case OTHER_ID2:
-      return isSetOtherId2();
-    case OTHER_ID3:
-      return isSetOtherId3();
-    case SOME_OBJECT:
-      return isSetSomeObject();
-    }
-    throw new java.lang.IllegalStateException();
-  }
-
-  @Override
-  public boolean equals(java.lang.Object that) {
-    if (that == null)
-      return false;
-    if (that instanceof MyPojo)
-      return this.equals((MyPojo)that);
-    return false;
-  }
-
-  public boolean equals(MyPojo that) {
-    if (that == null)
-      return false;
-    if (this == that)
-      return true;
-
-    boolean this_present_id = true;
-    boolean that_present_id = true;
-    if (this_present_id || that_present_id) {
-      if (!(this_present_id && that_present_id))
-        return false;
-      if (this.id != that.id)
-        return false;
+    public java.util.Iterator<MyOperation> getOperationsIterator() {
+        return (this.operations == null) ? null : this.operations.iterator();
     }
 
-    boolean this_present_name = true && this.isSetName();
-    boolean that_present_name = true && that.isSetName();
-    if (this_present_name || that_present_name) {
-      if (!(this_present_name && that_present_name))
-        return false;
-      if (!this.name.equals(that.name))
-        return false;
+    public void addToOperations(MyOperation elem) {
+        if (this.operations == null) {
+            this.operations = new java.util.ArrayList<MyOperation>();
+        }
+        this.operations.add(elem);
     }
 
-    boolean this_present_operationName = true && this.isSetOperationName();
-    boolean that_present_operationName = true && that.isSetOperationName();
-    if (this_present_operationName || that_present_operationName) {
-      if (!(this_present_operationName && that_present_operationName))
-        return false;
-      if (!this.operationName.equals(that.operationName))
-        return false;
+    @org.apache.thrift.annotation.Nullable
+    public java.util.List<MyOperation> getOperations() {
+        return this.operations;
     }
 
-    boolean this_present_operations = true && this.isSetOperations();
-    boolean that_present_operations = true && that.isSetOperations();
-    if (this_present_operations || that_present_operations) {
-      if (!(this_present_operations && that_present_operations))
-        return false;
-      if (!this.operations.equals(that.operations))
-        return false;
+    public MyPojo setOperations(
+            @org.apache.thrift.annotation.Nullable java.util.List<MyOperation> operations) {
+        this.operations = operations;
+        return this;
     }
 
-    boolean this_present_otherId1 = true;
-    boolean that_present_otherId1 = true;
-    if (this_present_otherId1 || that_present_otherId1) {
-      if (!(this_present_otherId1 && that_present_otherId1))
-        return false;
-      if (this.otherId1 != that.otherId1)
-        return false;
+    public void unsetOperations() {
+        this.operations = null;
     }
 
-    boolean this_present_otherId2 = true;
-    boolean that_present_otherId2 = true;
-    if (this_present_otherId2 || that_present_otherId2) {
-      if (!(this_present_otherId2 && that_present_otherId2))
-        return false;
-      if (this.otherId2 != that.otherId2)
-        return false;
+    /** Returns true if field operations is set (has been assigned a value) and false otherwise */
+    public boolean isSetOperations() {
+        return this.operations != null;
     }
 
-    boolean this_present_otherId3 = true;
-    boolean that_present_otherId3 = true;
-    if (this_present_otherId3 || that_present_otherId3) {
-      if (!(this_present_otherId3 && that_present_otherId3))
-        return false;
-      if (this.otherId3 != that.otherId3)
-        return false;
+    public void setOperationsIsSet(boolean value) {
+        if (!value) {
+            this.operations = null;
+        }
     }
 
-    boolean this_present_someObject = true && this.isSetSomeObject();
-    boolean that_present_someObject = true && that.isSetSomeObject();
-    if (this_present_someObject || that_present_someObject) {
-      if (!(this_present_someObject && that_present_someObject))
-        return false;
-      if (!this.someObject.equals(that.someObject))
-        return false;
+    public int getOtherId1() {
+        return this.otherId1;
     }
 
-    return true;
-  }
+    public MyPojo setOtherId1(int otherId1) {
+        this.otherId1 = otherId1;
+        setOtherId1IsSet(true);
+        return this;
+    }
 
-  @Override
-  public int hashCode() {
-    int hashCode = 1;
+    public void unsetOtherId1() {
+        __isset_bitfield =
+                org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __OTHERID1_ISSET_ID);
+    }
 
-    hashCode = hashCode * 8191 + id;
+    /** Returns true if field otherId1 is set (has been assigned a value) and false otherwise */
+    public boolean isSetOtherId1() {
+        return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __OTHERID1_ISSET_ID);
+    }
 
-    hashCode = hashCode * 8191 + ((isSetName()) ? 131071 : 524287);
-    if (isSetName())
-      hashCode = hashCode * 8191 + name.hashCode();
+    public void setOtherId1IsSet(boolean value) {
+        __isset_bitfield =
+                org.apache.thrift.EncodingUtils.setBit(
+                        __isset_bitfield, __OTHERID1_ISSET_ID, value);
+    }
 
-    hashCode = hashCode * 8191 + ((isSetOperationName()) ? 131071 : 524287);
-    if (isSetOperationName())
-      hashCode = hashCode * 8191 + operationName.hashCode();
+    public int getOtherId2() {
+        return this.otherId2;
+    }
 
-    hashCode = hashCode * 8191 + ((isSetOperations()) ? 131071 : 524287);
-    if (isSetOperations())
-      hashCode = hashCode * 8191 + operations.hashCode();
+    public MyPojo setOtherId2(int otherId2) {
+        this.otherId2 = otherId2;
+        setOtherId2IsSet(true);
+        return this;
+    }
 
-    hashCode = hashCode * 8191 + otherId1;
+    public void unsetOtherId2() {
+        __isset_bitfield =
+                org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __OTHERID2_ISSET_ID);
+    }
 
-    hashCode = hashCode * 8191 + otherId2;
+    /** Returns true if field otherId2 is set (has been assigned a value) and false otherwise */
+    public boolean isSetOtherId2() {
+        return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __OTHERID2_ISSET_ID);
+    }
 
-    hashCode = hashCode * 8191 + otherId3;
+    public void setOtherId2IsSet(boolean value) {
+        __isset_bitfield =
+                org.apache.thrift.EncodingUtils.setBit(
+                        __isset_bitfield, __OTHERID2_ISSET_ID, value);
+    }
 
-    hashCode = hashCode * 8191 + ((isSetSomeObject()) ? 131071 : 524287);
-    if (isSetSomeObject())
-      hashCode = hashCode * 8191 + someObject.hashCode();
+    public int getOtherId3() {
+        return this.otherId3;
+    }
 
-    return hashCode;
-  }
+    public MyPojo setOtherId3(int otherId3) {
+        this.otherId3 = otherId3;
+        setOtherId3IsSet(true);
+        return this;
+    }
 
-  @Override
-  public int compareTo(MyPojo other) {
-    if (!getClass().equals(other.getClass())) {
-      return getClass().getName().compareTo(other.getClass().getName());
+    public void unsetOtherId3() {
+        __isset_bitfield =
+                org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __OTHERID3_ISSET_ID);
     }
 
-    int lastComparison = 0;
+    /** Returns true if field otherId3 is set (has been assigned a value) and false otherwise */
+    public boolean isSetOtherId3() {
+        return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __OTHERID3_ISSET_ID);
+    }
 
-    lastComparison = java.lang.Boolean.valueOf(isSetId()).compareTo(other.isSetId());
-    if (lastComparison != 0) {
-      return lastComparison;
+    public void setOtherId3IsSet(boolean value) {
+        __isset_bitfield =
+                org.apache.thrift.EncodingUtils.setBit(
+                        __isset_bitfield, __OTHERID3_ISSET_ID, value);
     }
-    if (isSetId()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id, other.id);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = java.lang.Boolean.valueOf(isSetName()).compareTo(other.isSetName());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetName()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.name, other.name);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = java.lang.Boolean.valueOf(isSetOperationName()).compareTo(other.isSetOperationName());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetOperationName()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.operationName, other.operationName);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = java.lang.Boolean.valueOf(isSetOperations()).compareTo(other.isSetOperations());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetOperations()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.operations, other.operations);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = java.lang.Boolean.valueOf(isSetOtherId1()).compareTo(other.isSetOtherId1());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetOtherId1()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.otherId1, other.otherId1);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = java.lang.Boolean.valueOf(isSetOtherId2()).compareTo(other.isSetOtherId2());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetOtherId2()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.otherId2, other.otherId2);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = java.lang.Boolean.valueOf(isSetOtherId3()).compareTo(other.isSetOtherId3());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetOtherId3()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.otherId3, other.otherId3);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = java.lang.Boolean.valueOf(isSetSomeObject()).compareTo(other.isSetSomeObject());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetSomeObject()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.someObject, other.someObject);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    return 0;
-  }
-
-  @org.apache.thrift.annotation.Nullable
-  public _Fields fieldForId(int fieldId) {
-    return _Fields.findByThriftId(fieldId);
-  }
-
-  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
-    scheme(iprot).read(iprot, this);
-  }
-
-  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
-    scheme(oprot).write(oprot, this);
-  }
-
-  @Override
-  public java.lang.String toString() {
-    java.lang.StringBuilder sb = new java.lang.StringBuilder("MyPojo(");
-    boolean first = true;
-
-    sb.append("id:");
-    sb.append(this.id);
-    first = false;
-    if (!first) sb.append(", ");
-    sb.append("name:");
-    if (this.name == null) {
-      sb.append("null");
-    } else {
-      sb.append(this.name);
-    }
-    first = false;
-    if (!first) sb.append(", ");
-    sb.append("operationName:");
-    if (this.operationName == null) {
-      sb.append("null");
-    } else {
-      sb.append(this.operationName);
-    }
-    first = false;
-    if (!first) sb.append(", ");
-    sb.append("operations:");
-    if (this.operations == null) {
-      sb.append("null");
-    } else {
-      sb.append(this.operations);
-    }
-    first = false;
-    if (!first) sb.append(", ");
-    sb.append("otherId1:");
-    sb.append(this.otherId1);
-    first = false;
-    if (!first) sb.append(", ");
-    sb.append("otherId2:");
-    sb.append(this.otherId2);
-    first = false;
-    if (!first) sb.append(", ");
-    sb.append("otherId3:");
-    sb.append(this.otherId3);
-    first = false;
-    if (isSetSomeObject()) {
-      if (!first) sb.append(", ");
-      sb.append("someObject:");
-      if (this.someObject == null) {
-        sb.append("null");
-      } else {
-        sb.append(this.someObject);
-      }
-      first = false;
-    }
-    sb.append(")");
-    return sb.toString();
-  }
-
-  public void validate() throws org.apache.thrift.TException {
-    // check for required fields
-    // check for sub-struct validity
-  }
-
-  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
-    try {
-      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
-    } catch (org.apache.thrift.TException te) {
-      throw new java.io.IOException(te);
-    }
-  }
-
-  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
-    try {
-      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
-      __isset_bitfield = 0;
-      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
-    } catch (org.apache.thrift.TException te) {
-      throw new java.io.IOException(te);
-    }
-  }
-
-  private static class MyPojoStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
-    public MyPojoStandardScheme getScheme() {
-      return new MyPojoStandardScheme();
-    }
-  }
-
-  private static class MyPojoStandardScheme extends org.apache.thrift.scheme.StandardScheme<MyPojo> {
-
-    public void read(org.apache.thrift.protocol.TProtocol iprot, MyPojo struct) throws org.apache.thrift.TException {
-      org.apache.thrift.protocol.TField schemeField;
-      iprot.readStructBegin();
-      while (true)
-      {
-        schemeField = iprot.readFieldBegin();
-        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
-          break;
+
+    @org.apache.thrift.annotation.Nullable
+    public java.lang.String getSomeObject() {
+        return this.someObject;
+    }
+
+    public MyPojo setSomeObject(
+            @org.apache.thrift.annotation.Nullable java.lang.String someObject) {
+        this.someObject = someObject;
+        return this;
+    }
+
+    public void unsetSomeObject() {
+        this.someObject = null;
+    }
+
+    /** Returns true if field someObject is set (has been assigned a value) and false otherwise */
+    public boolean isSetSomeObject() {
+        return this.someObject != null;
+    }
+
+    public void setSomeObjectIsSet(boolean value) {
+        if (!value) {
+            this.someObject = null;
         }
-        switch (schemeField.id) {
-          case 1: // ID
-            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
-              struct.id = iprot.readI32();
-              struct.setIdIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
-          case 2: // NAME
-            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-              struct.name = iprot.readString();
-              struct.setNameIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
-          case 3: // OPERATION_NAME
-            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
-              {
-                org.apache.thrift.protocol.TList _list0 = iprot.readListBegin();
-                struct.operationName = new java.util.ArrayList<java.lang.String>(_list0.size);
-                @org.apache.thrift.annotation.Nullable java.lang.String _elem1;
-                for (int _i2 = 0; _i2 < _list0.size; ++_i2)
-                {
-                  _elem1 = iprot.readString();
-                  struct.operationName.add(_elem1);
+    }
+
+    public void setFieldValue(
+            _Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
+        switch (field) {
+            case ID:
+                if (value == null) {
+                    unsetId();
+                } else {
+                    setId((java.lang.Integer) value);
                 }
-                iprot.readListEnd();
-              }
-              struct.setOperationNameIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
-            }
-            break;
-          case 4: // OPERATIONS
-            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
-              {
-                org.apache.thrift.protocol.TList _list3 = iprot.readListBegin();
-                struct.operations = new java.util.ArrayList<MyOperation>(_list3.size);
-                @org.apache.thrift.annotation.Nullable MyOperation _elem4;
-                for (int _i5 = 0; _i5 < _list3.size; ++_i5)
-                {
-                  _elem4 = new MyOperation();
-                  _elem4.read(iprot);
-                  struct.operations.add(_elem4);
+                break;
+
+            case NAME:
+                if (value == null) {
+                    unsetName();
+                } else {
+                    setName((java.lang.String) value);
                 }
-                iprot.readListEnd();
-              }
-              struct.setOperationsIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+                break;
+
+            case OPERATION_NAME:
+                if (value == null) {
+                    unsetOperationName();
+                } else {
+                    setOperationName((java.util.List<java.lang.String>) value);
+                }
+                break;
+
+            case OPERATIONS:
+                if (value == null) {
+                    unsetOperations();
+                } else {
+                    setOperations((java.util.List<MyOperation>) value);
+                }
+                break;
+
+            case OTHER_ID1:
+                if (value == null) {
+                    unsetOtherId1();
+                } else {
+                    setOtherId1((java.lang.Integer) value);
+                }
+                break;
+
+            case OTHER_ID2:
+                if (value == null) {
+                    unsetOtherId2();
+                } else {
+                    setOtherId2((java.lang.Integer) value);
+                }
+                break;
+
+            case OTHER_ID3:
+                if (value == null) {
+                    unsetOtherId3();
+                } else {
+                    setOtherId3((java.lang.Integer) value);
+                }
+                break;
+
+            case SOME_OBJECT:
+                if (value == null) {
+                    unsetSomeObject();
+                } else {
+                    setSomeObject((java.lang.String) value);
+                }
+                break;
+        }
+    }
+
+    @org.apache.thrift.annotation.Nullable
+    public java.lang.Object getFieldValue(_Fields field) {
+        switch (field) {
+            case ID:
+                return getId();
+
+            case NAME:
+                return getName();
+
+            case OPERATION_NAME:
+                return getOperationName();
+
+            case OPERATIONS:
+                return getOperations();
+
+            case OTHER_ID1:
+                return getOtherId1();
+
+            case OTHER_ID2:
+                return getOtherId2();
+
+            case OTHER_ID3:
+                return getOtherId3();
+
+            case SOME_OBJECT:
+                return getSomeObject();
+        }
+        throw new java.lang.IllegalStateException();
+    }
+
+    /**
+     * Returns true if field corresponding to fieldID is set (has been assigned a value) and false
+     * otherwise
+     */
+    public boolean isSet(_Fields field) {
+        if (field == null) {
+            throw new java.lang.IllegalArgumentException();
+        }
+
+        switch (field) {
+            case ID:
+                return isSetId();
+            case NAME:
+                return isSetName();
+            case OPERATION_NAME:
+                return isSetOperationName();
+            case OPERATIONS:
+                return isSetOperations();
+            case OTHER_ID1:
+                return isSetOtherId1();
+            case OTHER_ID2:
+                return isSetOtherId2();
+            case OTHER_ID3:
+                return isSetOtherId3();
+            case SOME_OBJECT:
+                return isSetSomeObject();
+        }
+        throw new java.lang.IllegalStateException();
+    }
+
+    @Override
+    public boolean equals(java.lang.Object that) {
+        if (that == null) return false;
+        if (that instanceof MyPojo) return this.equals((MyPojo) that);
+        return false;
+    }
+
+    public boolean equals(MyPojo that) {
+        if (that == null) return false;
+        if (this == that) return true;
+
+        boolean this_present_id = true;
+        boolean that_present_id = true;
+        if (this_present_id || that_present_id) {
+            if (!(this_present_id && that_present_id)) return false;
+            if (this.id != that.id) return false;
+        }
+
+        boolean this_present_name = true && this.isSetName();
+        boolean that_present_name = true && that.isSetName();
+        if (this_present_name || that_present_name) {
+            if (!(this_present_name && that_present_name)) return false;
+            if (!this.name.equals(that.name)) return false;
+        }
+
+        boolean this_present_operationName = true && this.isSetOperationName();
+        boolean that_present_operationName = true && that.isSetOperationName();
+        if (this_present_operationName || that_present_operationName) {
+            if (!(this_present_operationName && that_present_operationName)) return false;
+            if (!this.operationName.equals(that.operationName)) return false;
+        }
+
+        boolean this_present_operations = true && this.isSetOperations();
+        boolean that_present_operations = true && that.isSetOperations();
+        if (this_present_operations || that_present_operations) {
+            if (!(this_present_operations && that_present_operations)) return false;
+            if (!this.operations.equals(that.operations)) return false;
+        }
+
+        boolean this_present_otherId1 = true;
+        boolean that_present_otherId1 = true;
+        if (this_present_otherId1 || that_present_otherId1) {
+            if (!(this_present_otherId1 && that_present_otherId1)) return false;
+            if (this.otherId1 != that.otherId1) return false;
+        }
+
+        boolean this_present_otherId2 = true;
+        boolean that_present_otherId2 = true;
+        if (this_present_otherId2 || that_present_otherId2) {
+            if (!(this_present_otherId2 && that_present_otherId2)) return false;
+            if (this.otherId2 != that.otherId2) return false;
+        }
+
+        boolean this_present_otherId3 = true;
+        boolean that_present_otherId3 = true;
+        if (this_present_otherId3 || that_present_otherId3) {
+            if (!(this_present_otherId3 && that_present_otherId3)) return false;
+            if (this.otherId3 != that.otherId3) return false;
+        }
+
+        boolean this_present_someObject = true && this.isSetSomeObject();
+        boolean that_present_someObject = true && that.isSetSomeObject();
+        if (this_present_someObject || that_present_someObject) {
+            if (!(this_present_someObject && that_present_someObject)) return false;
+            if (!this.someObject.equals(that.someObject)) return false;
+        }
+
+        return true;
+    }
+
+    @Override
+    public int hashCode() {
+        int hashCode = 1;
+
+        hashCode = hashCode * 8191 + id;
+
+        hashCode = hashCode * 8191 + ((isSetName()) ? 131071 : 524287);
+        if (isSetName()) hashCode = hashCode * 8191 + name.hashCode();
+
+        hashCode = hashCode * 8191 + ((isSetOperationName()) ? 131071 : 524287);
+        if (isSetOperationName()) hashCode = hashCode * 8191 + operationName.hashCode();
+
+        hashCode = hashCode * 8191 + ((isSetOperations()) ? 131071 : 524287);
+        if (isSetOperations()) hashCode = hashCode * 8191 + operations.hashCode();
+
+        hashCode = hashCode * 8191 + otherId1;
+
+        hashCode = hashCode * 8191 + otherId2;
+
+        hashCode = hashCode * 8191 + otherId3;
+
+        hashCode = hashCode * 8191 + ((isSetSomeObject()) ? 131071 : 524287);
+        if (isSetSomeObject()) hashCode = hashCode * 8191 + someObject.hashCode();
+
+        return hashCode;
+    }
+
+    @Override
+    public int compareTo(MyPojo other) {
+        if (!getClass().equals(other.getClass())) {
+            return getClass().getName().compareTo(other.getClass().getName());
+        }
+
+        int lastComparison = 0;
+
+        lastComparison = java.lang.Boolean.valueOf(isSetId()).compareTo(other.isSetId());
+        if (lastComparison != 0) {
+            return lastComparison;
+        }
+        if (isSetId()) {
+            lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id, other.id);
+            if (lastComparison != 0) {
+                return lastComparison;
             }
-            break;
-          case 5: // OTHER_ID1
-            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
-              struct.otherId1 = iprot.readI32();
-              struct.setOtherId1IsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        lastComparison = java.lang.Boolean.valueOf(isSetName()).compareTo(other.isSetName());
+        if (lastComparison != 0) {
+            return lastComparison;
+        }
+        if (isSetName()) {
+            lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.name, other.name);
+            if (lastComparison != 0) {
+                return lastComparison;
             }
-            break;
-          case 6: // OTHER_ID2
-            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
-              struct.otherId2 = iprot.readI32();
-              struct.setOtherId2IsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        lastComparison =
+                java.lang.Boolean.valueOf(isSetOperationName())
+                        .compareTo(other.isSetOperationName());
+        if (lastComparison != 0) {
+            return lastComparison;
+        }
+        if (isSetOperationName()) {
+            lastComparison =
+                    org.apache.thrift.TBaseHelper.compareTo(
+                            this.operationName, other.operationName);
+            if (lastComparison != 0) {
+                return lastComparison;
             }
-            break;
-          case 7: // OTHER_ID3
-            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
-              struct.otherId3 = iprot.readI32();
-              struct.setOtherId3IsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        lastComparison =
+                java.lang.Boolean.valueOf(isSetOperations()).compareTo(other.isSetOperations());
+        if (lastComparison != 0) {
+            return lastComparison;
+        }
+        if (isSetOperations()) {
+            lastComparison =
+                    org.apache.thrift.TBaseHelper.compareTo(this.operations, other.operations);
+            if (lastComparison != 0) {
+                return lastComparison;
             }
-            break;
-          case 8: // SOME_OBJECT
-            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
-              struct.someObject = iprot.readString();
-              struct.setSomeObjectIsSet(true);
-            } else { 
-              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        lastComparison =
+                java.lang.Boolean.valueOf(isSetOtherId1()).compareTo(other.isSetOtherId1());
+        if (lastComparison != 0) {
+            return lastComparison;
+        }
+        if (isSetOtherId1()) {
+            lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.otherId1, other.otherId1);
+            if (lastComparison != 0) {
+                return lastComparison;
+            }
+        }
+        lastComparison =
+                java.lang.Boolean.valueOf(isSetOtherId2()).compareTo(other.isSetOtherId2());
+        if (lastComparison != 0) {
+            return lastComparison;
+        }
+        if (isSetOtherId2()) {
+            lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.otherId2, other.otherId2);
+            if (lastComparison != 0) {
+                return lastComparison;
             }
-            break;
-          default:
-            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
         }
-        iprot.readFieldEnd();
-      }
-      iprot.readStructEnd();
-
-      // check for required fields of primitive type, which can't be checked in the validate method
-      struct.validate();
-    }
-
-    public void write(org.apache.thrift.protocol.TProtocol oprot, MyPojo struct) throws org.apache.thrift.TException {
-      struct.validate();
-
-      oprot.writeStructBegin(STRUCT_DESC);
-      oprot.writeFieldBegin(ID_FIELD_DESC);
-      oprot.writeI32(struct.id);
-      oprot.writeFieldEnd();
-      if (struct.name != null) {
-        oprot.writeFieldBegin(NAME_FIELD_DESC);
-        oprot.writeString(struct.name);
-        oprot.writeFieldEnd();
-      }
-      if (struct.operationName != null) {
-        oprot.writeFieldBegin(OPERATION_NAME_FIELD_DESC);
-        {
-          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.operationName.size()));
-          for (java.lang.String _iter6 : struct.operationName)
-          {
-            oprot.writeString(_iter6);
-          }
-          oprot.writeListEnd();
+        lastComparison =
+                java.lang.Boolean.valueOf(isSetOtherId3()).compareTo(other.isSetOtherId3());
+        if (lastComparison != 0) {
+            return lastComparison;
         }
-        oprot.writeFieldEnd();
-      }
-      if (struct.operations != null) {
-        oprot.writeFieldBegin(OPERATIONS_FIELD_DESC);
-        {
-          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.operations.size()));
-          for (MyOperation _iter7 : struct.operations)
-          {
-            _iter7.write(oprot);
-          }
-          oprot.writeListEnd();
+        if (isSetOtherId3()) {
+            lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.otherId3, other.otherId3);
+            if (lastComparison != 0) {
+                return lastComparison;
+            }
         }
-        oprot.writeFieldEnd();
-      }
-      oprot.writeFieldBegin(OTHER_ID1_FIELD_DESC);
-      oprot.writeI32(struct.otherId1);
-      oprot.writeFieldEnd();
-      oprot.writeFieldBegin(OTHER_ID2_FIELD_DESC);
-      oprot.writeI32(struct.otherId2);
-      oprot.writeFieldEnd();
-      oprot.writeFieldBegin(OTHER_ID3_FIELD_DESC);
-      oprot.writeI32(struct.otherId3);
-      oprot.writeFieldEnd();
-      if (struct.someObject != null) {
-        if (struct.isSetSomeObject()) {
-          oprot.writeFieldBegin(SOME_OBJECT_FIELD_DESC);
-          oprot.writeString(struct.someObject);
-          oprot.writeFieldEnd();
+        lastComparison =
+                java.lang.Boolean.valueOf(isSetSomeObject()).compareTo(other.isSetSomeObject());
+        if (lastComparison != 0) {
+            return lastComparison;
         }
-      }
-      oprot.writeFieldStop();
-      oprot.writeStructEnd();
+        if (isSetSomeObject()) {
+            lastComparison =
+                    org.apache.thrift.TBaseHelper.compareTo(this.someObject, other.someObject);
+            if (lastComparison != 0) {
+                return lastComparison;
+            }
+        }
+        return 0;
     }
 
-  }
+    @org.apache.thrift.annotation.Nullable
+    public _Fields fieldForId(int fieldId) {
+        return _Fields.findByThriftId(fieldId);
+    }
 
-  private static class MyPojoTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
-    public MyPojoTupleScheme getScheme() {
-      return new MyPojoTupleScheme();
+    public void read(org.apache.thrift.protocol.TProtocol iprot)
+            throws org.apache.thrift.TException {
+        scheme(iprot).read(iprot, this);
     }
-  }
 
-  private static class MyPojoTupleScheme extends org.apache.thrift.scheme.TupleScheme<MyPojo> {
+    public void write(org.apache.thrift.protocol.TProtocol oprot)
+            throws org.apache.thrift.TException {
+        scheme(oprot).write(oprot, this);
+    }
 
     @Override
-    public void write(org.apache.thrift.protocol.TProtocol prot, MyPojo struct) throws org.apache.thrift.TException {
-      org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
-      java.util.BitSet optionals = new java.util.BitSet();
-      if (struct.isSetId()) {
-        optionals.set(0);
-      }
-      if (struct.isSetName()) {
-        optionals.set(1);
-      }
-      if (struct.isSetOperationName()) {
-        optionals.set(2);
-      }
-      if (struct.isSetOperations()) {
-        optionals.set(3);
-      }
-      if (struct.isSetOtherId1()) {
-        optionals.set(4);
-      }
-      if (struct.isSetOtherId2()) {
-        optionals.set(5);
-      }
-      if (struct.isSetOtherId3()) {
-        optionals.set(6);
-      }
-      if (struct.isSetSomeObject()) {
-        optionals.set(7);
-      }
-      oprot.writeBitSet(optionals, 8);
-      if (struct.isSetId()) {
-        oprot.writeI32(struct.id);
-      }
-      if (struct.isSetName()) {
-        oprot.writeString(struct.name);
-      }
-      if (struct.isSetOperationName()) {
-        {
-          oprot.writeI32(struct.operationName.size());
-          for (java.lang.String _iter8 : struct.operationName)
-          {
-            oprot.writeString(_iter8);
-          }
+    public java.lang.String toString() {
+        java.lang.StringBuilder sb = new java.lang.StringBuilder("MyPojo(");
+        boolean first = true;
+
+        sb.append("id:");
+        sb.append(this.id);
+        first = false;
+        if (!first) sb.append(", ");
+        sb.append("name:");
+        if (this.name == null) {
+            sb.append("null");
+        } else {
+            sb.append(this.name);
+        }
+        first = false;
+        if (!first) sb.append(", ");
+        sb.append("operationName:");
+        if (this.operationName == null) {
+            sb.append("null");
+        } else {
+            sb.append(this.operationName);
+        }
+        first = false;
+        if (!first) sb.append(", ");
+        sb.append("operations:");
+        if (this.operations == null) {
+            sb.append("null");
+        } else {
+            sb.append(this.operations);
         }
-      }
-      if (struct.isSetOperations()) {
-        {
-          oprot.writeI32(struct.operations.size());
-          for (MyOperation _iter9 : struct.operations)
-          {
-            _iter9.write(oprot);
-          }
+        first = false;
+        if (!first) sb.append(", ");
+        sb.append("otherId1:");
+        sb.append(this.otherId1);
+        first = false;
+        if (!first) sb.append(", ");
+        sb.append("otherId2:");
+        sb.append(this.otherId2);
+        first = false;
+        if (!first) sb.append(", ");
+        sb.append("otherId3:");
+        sb.append(this.otherId3);
+        first = false;
+        if (isSetSomeObject()) {
+            if (!first) sb.append(", ");
+            sb.append("someObject:");
+            if (this.someObject == null) {
+                sb.append("null");
+            } else {
+                sb.append(this.someObject);
+            }
+            first = false;
         }
-      }
-      if (struct.isSetOtherId1()) {
-        oprot.writeI32(struct.otherId1);
-      }
-      if (struct.isSetOtherId2()) {
-        oprot.writeI32(struct.otherId2);
-      }
-      if (struct.isSetOtherId3()) {
-        oprot.writeI32(struct.otherId3);
-      }
-      if (struct.isSetSomeObject()) {
-        oprot.writeString(struct.someObject);
-      }
+        sb.append(")");
+        return sb.toString();
     }
 
-    @Override
-    public void read(org.apache.thrift.protocol.TProtocol prot, MyPojo struct) throws org.apache.thrift.TException {
-      org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
-      java.util.BitSet incoming = iprot.readBitSet(8);
-      if (incoming.get(0)) {
-        struct.id = iprot.readI32();
-        struct.setIdIsSet(true);
-      }
-      if (incoming.get(1)) {
-        struct.name = iprot.readString();
-        struct.setNameIsSet(true);
-      }
-      if (incoming.get(2)) {
-        {
-          org.apache.thrift.protocol.TList _list10 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
-          struct.operationName = new java.util.ArrayList<java.lang.String>(_list10.size);
-          @org.apache.thrift.annotation.Nullable java.lang.String _elem11;
-          for (int _i12 = 0; _i12 < _list10.size; ++_i12)
-          {
-            _elem11 = iprot.readString();
-            struct.operationName.add(_elem11);
-          }
+    public void validate() throws org.apache.thrift.TException {
+        // check for required fields
+        // check for sub-struct validity
+    }
+
+    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+        try {
+            write(
+                    new org.apache.thrift.protocol.TCompactProtocol(
+                            new org.apache.thrift.transport.TIOStreamTransport(out)));
+        } catch (org.apache.thrift.TException te) {
+            throw new java.io.IOException(te);
         }
-        struct.setOperationNameIsSet(true);
-      }
-      if (incoming.get(3)) {
-        {
-          org.apache.thrift.protocol.TList _list13 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
-          struct.operations = new java.util.ArrayList<MyOperation>(_list13.size);
-          @org.apache.thrift.annotation.Nullable MyOperation _elem14;
-          for (int _i15 = 0; _i15 < _list13.size; ++_i15)
-          {
-            _elem14 = new MyOperation();
-            _elem14.read(iprot);
-            struct.operations.add(_elem14);
-          }
+    }
+
+    private void readObject(java.io.ObjectInputStream in)
+            throws java.io.IOException, java.lang.ClassNotFoundException {
+        try {
+            // It doesn't seem like you should have to do this, but Java serialization is
+            // wacky and doesn't call the default constructor.
+            __isset_bitfield = 0;
+            read(
+                    new org.apache.thrift.protocol.TCompactProtocol(
+                            new org.apache.thrift.transport.TIOStreamTransport(in)));
+        } catch (org.apache.thrift.TException te) {
+            throw new java.io.IOException(te);
         }
-        struct.setOperationsIsSet(true);
-      }
-      if (incoming.get(4)) {
-        struct.otherId1 = iprot.readI32();
-        struct.setOtherId1IsSet(true);
-      }
-      if (incoming.get(5)) {
-        struct.otherId2 = iprot.readI32();
-        struct.setOtherId2IsSet(true);
-      }
-      if (incoming.get(6)) {
-        struct.otherId3 = iprot.readI32();
-        struct.setOtherId3IsSet(true);
-      }
-      if (incoming.get(7)) {
-        struct.someObject = iprot.readString();
-        struct.setSomeObjectIsSet(true);
-      }
-    }
-  }
-
-  private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
-    return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
-  }
-}
+    }
+
+    /**
+     * The set of fields this struct contains, along with convenience methods for finding and
+     * manipulating them.
+     */
+    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+        ID((short) 1, "id"),
+        NAME((short) 2, "name"),
+        OPERATION_NAME((short) 3, "operationName"),
+        OPERATIONS((short) 4, "operations"),
+        OTHER_ID1((short) 5, "otherId1"),
+        OTHER_ID2((short) 6, "otherId2"),
+        OTHER_ID3((short) 7, "otherId3"),
+        SOME_OBJECT((short) 8, "someObject");
+
+        private static final java.util.Map<java.lang.String, _Fields> byName =
+                new java.util.HashMap<java.lang.String, _Fields>();
+
+        static {
+            for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
+                byName.put(field.getFieldName(), field);
+            }
+        }
+
+        private final short _thriftId;
+        private final java.lang.String _fieldName;
 
+        _Fields(short thriftId, java.lang.String fieldName) {
+            _thriftId = thriftId;
+            _fieldName = fieldName;
+        }
+
+        /** Find the _Fields constant that matches fieldId, or null if it's not found. */
+        @org.apache.thrift.annotation.Nullable
+        public static _Fields findByThriftId(int fieldId) {
+            switch (fieldId) {
+                case 1: // ID
+                    return ID;
+                case 2: // NAME
+                    return NAME;
+                case 3: // OPERATION_NAME
+                    return OPERATION_NAME;
+                case 4: // OPERATIONS
+                    return OPERATIONS;
+                case 5: // OTHER_ID1
+                    return OTHER_ID1;
+                case 6: // OTHER_ID2
+                    return OTHER_ID2;
+                case 7: // OTHER_ID3
+                    return OTHER_ID3;
+                case 8: // SOME_OBJECT
+                    return SOME_OBJECT;
+                default:
+                    return null;
+            }
+        }
+
+        /**
+         * Find the _Fields constant that matches fieldId, throwing an exception if it is not found.
+         */
+        public static _Fields findByThriftIdOrThrow(int fieldId) {
+            _Fields fields = findByThriftId(fieldId);
+            if (fields == null)
+                throw new java.lang.IllegalArgumentException(
+                        "Field " + fieldId + " doesn't exist!");
+            return fields;
+        }
+
+        /** Find the _Fields constant that matches name, or null if it's not found. */
+        @org.apache.thrift.annotation.Nullable
+        public static _Fields findByName(java.lang.String name) {
+            return byName.get(name);
+        }
+
+        public short getThriftFieldId() {
+            return _thriftId;
+        }
+
+        public java.lang.String getFieldName() {
+            return _fieldName;
+        }
+    }
+
+    private static class MyPojoStandardSchemeFactory
+            implements org.apache.thrift.scheme.SchemeFactory {
+        public MyPojoStandardScheme getScheme() {
+            return new MyPojoStandardScheme();
+        }
+    }
+
+    private static class MyPojoStandardScheme
+            extends org.apache.thrift.scheme.StandardScheme<MyPojo> {
+
+        public void read(org.apache.thrift.protocol.TProtocol iprot, MyPojo struct)
+                throws org.apache.thrift.TException {
+            org.apache.thrift.protocol.TField schemeField;
+            iprot.readStructBegin();
+            while (true) {
+                schemeField = iprot.readFieldBegin();
+                if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+                    break;
+                }
+                switch (schemeField.id) {
+                    case 1: // ID
+                        if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+                            struct.id = iprot.readI32();
+                            struct.setIdIsSet(true);
+                        } else {
+                            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+                        }
+                        break;
+                    case 2: // NAME
+                        if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+                            struct.name = iprot.readString();
+                            struct.setNameIsSet(true);
+                        } else {
+                            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+                        }
+                        break;
+                    case 3: // OPERATION_NAME
+                        if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
+                            {
+                                org.apache.thrift.protocol.TList _list0 = iprot.readListBegin();
+                                struct.operationName =
+                                        new java.util.ArrayList<java.lang.String>(_list0.size);
+                                @org.apache.thrift.annotation.Nullable java.lang.String _elem1;
+                                for (int _i2 = 0; _i2 < _list0.size; ++_i2) {
+                                    _elem1 = iprot.readString();
+                                    struct.operationName.add(_elem1);
+                                }
+                                iprot.readListEnd();
+                            }
+                            struct.setOperationNameIsSet(true);
+                        } else {
+                            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+                        }
+                        break;
+                    case 4: // OPERATIONS
+                        if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
+                            {
+                                org.apache.thrift.protocol.TList _list3 = iprot.readListBegin();
+                                struct.operations =
+                                        new java.util.ArrayList<MyOperation>(_list3.size);
+                                @org.apache.thrift.annotation.Nullable MyOperation _elem4;
+                                for (int _i5 = 0; _i5 < _list3.size; ++_i5) {
+                                    _elem4 = new MyOperation();
+                                    _elem4.read(iprot);
+                                    struct.operations.add(_elem4);
+                                }
+                                iprot.readListEnd();
+                            }
+                            struct.setOperationsIsSet(true);
+                        } else {
+                            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+                        }
+                        break;
+                    case 5: // OTHER_ID1
+                        if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+                            struct.otherId1 = iprot.readI32();
+                            struct.setOtherId1IsSet(true);
+                        } else {
+                            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+                        }
+                        break;
+                    case 6: // OTHER_ID2
+                        if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+                            struct.otherId2 = iprot.readI32();
+                            struct.setOtherId2IsSet(true);
+                        } else {
+                            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+                        }
+                        break;
+                    case 7: // OTHER_ID3
+                        if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+                            struct.otherId3 = iprot.readI32();
+                            struct.setOtherId3IsSet(true);
+                        } else {
+                            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+                        }
+                        break;
+                    case 8: // SOME_OBJECT
+                        if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+                            struct.someObject = iprot.readString();
+                            struct.setSomeObjectIsSet(true);
+                        } else {
+                            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+                        }
+                        break;
+                    default:
+                        org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+                }
+                iprot.readFieldEnd();
+            }
+            iprot.readStructEnd();
+
+            // check for required fields of primitive type, which can't be checked in the validate
+            // method
+            struct.validate();
+        }
+
+        public void write(org.apache.thrift.protocol.TProtocol oprot, MyPojo struct)
+                throws org.apache.thrift.TException {
+            struct.validate();
+
+            oprot.writeStructBegin(STRUCT_DESC);
+            oprot.writeFieldBegin(ID_FIELD_DESC);
+            oprot.writeI32(struct.id);
+            oprot.writeFieldEnd();
+            if (struct.name != null) {
+                oprot.writeFieldBegin(NAME_FIELD_DESC);
+                oprot.writeString(struct.name);
+                oprot.writeFieldEnd();
+            }
+            if (struct.operationName != null) {
+                oprot.writeFieldBegin(OPERATION_NAME_FIELD_DESC);
+                {
+                    oprot.writeListBegin(
+                            new org.apache.thrift.protocol.TList(
+                                    org.apache.thrift.protocol.TType.STRING,
+                                    struct.operationName.size()));
+                    for (java.lang.String _iter6 : struct.operationName) {
+                        oprot.writeString(_iter6);
+                    }
+                    oprot.writeListEnd();
+                }
+                oprot.writeFieldEnd();
+            }
+            if (struct.operations != null) {
+                oprot.writeFieldBegin(OPERATIONS_FIELD_DESC);
+                {
+                    oprot.writeListBegin(
+                            new org.apache.thrift.protocol.TList(
+                                    org.apache.thrift.protocol.TType.STRUCT,
+                                    struct.operations.size()));
+                    for (MyOperation _iter7 : struct.operations) {
+                        _iter7.write(oprot);
+                    }
+                    oprot.writeListEnd();
+                }
+                oprot.writeFieldEnd();
+            }
+            oprot.writeFieldBegin(OTHER_ID1_FIELD_DESC);
+            oprot.writeI32(struct.otherId1);
+            oprot.writeFieldEnd();
+            oprot.writeFieldBegin(OTHER_ID2_FIELD_DESC);
+            oprot.writeI32(struct.otherId2);
+            oprot.writeFieldEnd();
+            oprot.writeFieldBegin(OTHER_ID3_FIELD_DESC);
+            oprot.writeI32(struct.otherId3);
+            oprot.writeFieldEnd();
+            if (struct.someObject != null) {
+                if (struct.isSetSomeObject()) {
+                    oprot.writeFieldBegin(SOME_OBJECT_FIELD_DESC);
+                    oprot.writeString(struct.someObject);
+                    oprot.writeFieldEnd();
+                }
+            }
+            oprot.writeFieldStop();
+            oprot.writeStructEnd();
+        }
+    }
+
+    private static class MyPojoTupleSchemeFactory
+            implements org.apache.thrift.scheme.SchemeFactory {
+        public MyPojoTupleScheme getScheme() {
+            return new MyPojoTupleScheme();
+        }
+    }
+
+    private static class MyPojoTupleScheme extends org.apache.thrift.scheme.TupleScheme<MyPojo> {
+
+        @Override
+        public void write(org.apache.thrift.protocol.TProtocol prot, MyPojo struct)
+                throws org.apache.thrift.TException {
+            org.apache.thrift.protocol.TTupleProtocol oprot =
+                    (org.apache.thrift.protocol.TTupleProtocol) prot;
+            java.util.BitSet optionals = new java.util.BitSet();
+            if (struct.isSetId()) {
+                optionals.set(0);
+            }
+            if (struct.isSetName()) {
+                optionals.set(1);
+            }
+            if (struct.isSetOperationName()) {
+                optionals.set(2);
+            }
+            if (struct.isSetOperations()) {
+                optionals.set(3);
+            }
+            if (struct.isSetOtherId1()) {
+                optionals.set(4);
+            }
+            if (struct.isSetOtherId2()) {
+                optionals.set(5);
+            }
+            if (struct.isSetOtherId3()) {
+                optionals.set(6);
+            }
+            if (struct.isSetSomeObject()) {
+                optionals.set(7);
+            }
+            oprot.writeBitSet(optionals, 8);
+            if (struct.isSetId()) {
+                oprot.writeI32(struct.id);
+            }
+            if (struct.isSetName()) {
+                oprot.writeString(struct.name);
+            }
+            if (struct.isSetOperationName()) {
+                {
+                    oprot.writeI32(struct.operationName.size());
+                    for (java.lang.String _iter8 : struct.operationName) {
+                        oprot.writeString(_iter8);
+                    }
+                }
+            }
+            if (struct.isSetOperations()) {
+                {
+                    oprot.writeI32(struct.operations.size());
+                    for (MyOperation _iter9 : struct.operations) {
+                        _iter9.write(oprot);
+                    }
+                }
+            }
+            if (struct.isSetOtherId1()) {
+                oprot.writeI32(struct.otherId1);
+            }
+            if (struct.isSetOtherId2()) {
+                oprot.writeI32(struct.otherId2);
+            }
+            if (struct.isSetOtherId3()) {
+                oprot.writeI32(struct.otherId3);
+            }
+            if (struct.isSetSomeObject()) {
+                oprot.writeString(struct.someObject);
+            }
+        }
+
+        @Override
+        public void read(org.apache.thrift.protocol.TProtocol prot, MyPojo struct)
+                throws org.apache.thrift.TException {
+            org.apache.thrift.protocol.TTupleProtocol iprot =
+                    (org.apache.thrift.protocol.TTupleProtocol) prot;
+            java.util.BitSet incoming = iprot.readBitSet(8);
+            if (incoming.get(0)) {
+                struct.id = iprot.readI32();
+                struct.setIdIsSet(true);
+            }
+            if (incoming.get(1)) {
+                struct.name = iprot.readString();
+                struct.setNameIsSet(true);
+            }
+            if (incoming.get(2)) {
+                {
+                    org.apache.thrift.protocol.TList _list10 =
+                            new org.apache.thrift.protocol.TList(
+                                    org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+                    struct.operationName = new java.util.ArrayList<java.lang.String>(_list10.size);
+                    @org.apache.thrift.annotation.Nullable java.lang.String _elem11;
+                    for (int _i12 = 0; _i12 < _list10.size; ++_i12) {
+                        _elem11 = iprot.readString();
+                        struct.operationName.add(_elem11);
+                    }
+                }
+                struct.setOperationNameIsSet(true);
+            }
+            if (incoming.get(3)) {
+                {
+                    org.apache.thrift.protocol.TList _list13 =
+                            new org.apache.thrift.protocol.TList(
+                                    org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+                    struct.operations = new java.util.ArrayList<MyOperation>(_list13.size);
+                    @org.apache.thrift.annotation.Nullable MyOperation _elem14;
+                    for (int _i15 = 0; _i15 < _list13.size; ++_i15) {
+                        _elem14 = new MyOperation();
+                        _elem14.read(iprot);
+                        struct.operations.add(_elem14);
+                    }
+                }
+                struct.setOperationsIsSet(true);
+            }
+            if (incoming.get(4)) {
+                struct.otherId1 = iprot.readI32();
+                struct.setOtherId1IsSet(true);
+            }
+            if (incoming.get(5)) {
+                struct.otherId2 = iprot.readI32();
+                struct.setOtherId2IsSet(true);
+            }
+            if (incoming.get(6)) {
+                struct.otherId3 = iprot.readI32();
+                struct.setOtherId3IsSet(true);
+            }
+            if (incoming.get(7)) {
+                struct.someObject = iprot.readString();
+                struct.setSomeObjectIsSet(true);
+            }
+        }
+    }
+}
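
For reference, the writeObject/readObject pair above serializes the struct through Thrift's TCompactProtocol. A minimal round-trip sketch (not part of this patch; it assumes a libthrift dependency matching the generator version, and the field values are purely illustrative):

    import org.apache.thrift.TDeserializer;
    import org.apache.thrift.TSerializer;
    import org.apache.thrift.protocol.TCompactProtocol;

    public class MyPojoRoundTrip {
        public static void main(String[] args) throws Exception {
            // Populate a few of the fields whose fluent setters are generated above.
            MyPojo pojo = new MyPojo()
                    .setId(1)
                    .setName("name")
                    .setOtherId1(2)
                    .setOtherId2(3)
                    .setOtherId3(4)
                    .setSomeObject("someObject");

            // Serialize and deserialize with the same compact protocol used by the
            // generated writeObject/readObject methods.
            byte[] bytes = new TSerializer(new TCompactProtocol.Factory()).serialize(pojo);
            MyPojo copy = new MyPojo();
            new TDeserializer(new TCompactProtocol.Factory()).deserialize(copy, bytes);

            // equals() compares every set field, as generated above.
            if (!pojo.equals(copy)) {
                throw new AssertionError("round trip changed the struct");
            }
        }
    }
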
diff --git a/src/main/java/org/apache/flink/scheduler/benchmark/SchedulerBenchmarkExecutorBase.java b/src/main/java/org/apache/flink/scheduler/benchmark/SchedulerBenchmarkExecutorBase.java
index fffbdfa..724deb0 100644
--- a/src/main/java/org/apache/flink/scheduler/benchmark/SchedulerBenchmarkExecutorBase.java
+++ b/src/main/java/org/apache/flink/scheduler/benchmark/SchedulerBenchmarkExecutorBase.java
@@ -32,27 +32,28 @@ import org.openjdk.jmh.runner.options.VerboseMode;
 
 import java.util.concurrent.TimeUnit;
 
-/**
- * The base class of all benchmarks related to the scheduler.
- */
+/** The base class of all benchmarks related to the scheduler. */
 @SuppressWarnings("MethodMayBeStatic")
 @State(Scope.Thread)
 @OutputTimeUnit(TimeUnit.MILLISECONDS)
 @BenchmarkMode(Mode.AverageTime)
-@Fork(value = 6, jvmArgsAppend = {
-		"-Djava.rmi.server.hostname=127.0.0.1",
-		"-Dcom.sun.management.jmxremote.authenticate=false",
-		"-Dcom.sun.management.jmxremote.ssl=false",
-		"-Dcom.sun.management.jmxremote.ssl"
-})
+@Fork(
+        value = 6,
+        jvmArgsAppend = {
+            "-Djava.rmi.server.hostname=127.0.0.1",
+            "-Dcom.sun.management.jmxremote.authenticate=false",
+            "-Dcom.sun.management.jmxremote.ssl=false",
+            "-Dcom.sun.management.jmxremote.ssl"
+        })
 public class SchedulerBenchmarkExecutorBase {
 
-	public static void runBenchmark(Class<?> clazz) throws RunnerException {
-		Options options = new OptionsBuilder()
-				.verbosity(VerboseMode.NORMAL)
-				.include(".*" + clazz.getCanonicalName() + ".*")
-				.build();
+    public static void runBenchmark(Class<?> clazz) throws RunnerException {
+        Options options =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(".*" + clazz.getCanonicalName() + ".*")
+                        .build();
 
-		new Runner(options).run();
-	}
+        new Runner(options).run();
+    }
 }
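
The include pattern handed to the OptionsBuilder is a regular expression over benchmark names, so the same runner setup can be narrowed from a whole executor class down to a single method. A hypothetical invocation (the class/method suffix is illustrative):

    import org.openjdk.jmh.runner.Runner;
    import org.openjdk.jmh.runner.RunnerException;
    import org.openjdk.jmh.runner.options.Options;
    import org.openjdk.jmh.runner.options.OptionsBuilder;
    import org.openjdk.jmh.runner.options.VerboseMode;

    public class SingleBenchmarkRunner {
        public static void main(String[] args) throws RunnerException {
            // Same pattern as runBenchmark(Class), but anchored to one benchmark method.
            Options options =
                    new OptionsBuilder()
                            .verbosity(VerboseMode.NORMAL)
                            .include(".*CreateSchedulerBenchmarkExecutor.createScheduler$")
                            .build();
            new Runner(options).run();
        }
    }
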
diff --git a/src/main/java/org/apache/flink/scheduler/benchmark/deploying/DeployingDownstreamTasksInBatchJobBenchmarkExecutor.java b/src/main/java/org/apache/flink/scheduler/benchmark/deploying/DeployingDownstreamTasksInBatchJobBenchmarkExecutor.java
index 89a46f7..ee63873 100644
--- a/src/main/java/org/apache/flink/scheduler/benchmark/deploying/DeployingDownstreamTasksInBatchJobBenchmarkExecutor.java
+++ b/src/main/java/org/apache/flink/scheduler/benchmark/deploying/DeployingDownstreamTasksInBatchJobBenchmarkExecutor.java
@@ -33,34 +33,35 @@ import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.runner.RunnerException;
 
 /**
- * The benchmark of deploying downstream tasks in a BATCH job.
- * The related method is {@link Execution#deploy}.
+ * The benchmark of deploying downstream tasks in a BATCH job. The related method is {@link
+ * Execution#deploy}.
  */
-public class DeployingDownstreamTasksInBatchJobBenchmarkExecutor extends SchedulerBenchmarkExecutorBase {
+public class DeployingDownstreamTasksInBatchJobBenchmarkExecutor
+        extends SchedulerBenchmarkExecutorBase {
 
-	@Param("BATCH")
-	private JobConfiguration jobConfiguration;
+    @Param("BATCH")
+    private JobConfiguration jobConfiguration;
 
-	private DeployingDownstreamTasksInBatchJobBenchmark benchmark;
+    private DeployingDownstreamTasksInBatchJobBenchmark benchmark;
 
-	public static void main(String[] args) throws RunnerException {
-		runBenchmark(DeployingDownstreamTasksInBatchJobBenchmarkExecutor.class);
-	}
+    public static void main(String[] args) throws RunnerException {
+        runBenchmark(DeployingDownstreamTasksInBatchJobBenchmarkExecutor.class);
+    }
 
-	@Setup(Level.Trial)
-	public void setup() throws Exception {
-		benchmark = new DeployingDownstreamTasksInBatchJobBenchmark();
-		benchmark.setup(jobConfiguration);
-	}
+    @Setup(Level.Trial)
+    public void setup() throws Exception {
+        benchmark = new DeployingDownstreamTasksInBatchJobBenchmark();
+        benchmark.setup(jobConfiguration);
+    }
 
-	@Benchmark
-	@BenchmarkMode(Mode.SingleShotTime)
-	public void deployDownstreamTasks() throws Exception {
-		benchmark.deployDownstreamTasks();
-	}
+    @Benchmark
+    @BenchmarkMode(Mode.SingleShotTime)
+    public void deployDownstreamTasks() throws Exception {
+        benchmark.deployDownstreamTasks();
+    }
 
-	@TearDown(Level.Trial)
-	public void teardown() {
-		benchmark.teardown();
-	}
+    @TearDown(Level.Trial)
+    public void teardown() {
+        benchmark.teardown();
+    }
 }
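
This executor, and the ones that follow, all share one JMH lifecycle: @Setup(Level.Trial) builds the benchmark fixture once per trial outside the measurement, the @Benchmark method runs in Mode.SingleShotTime so a single cold invocation is timed, and @TearDown(Level.Trial) releases the fixture. A self-contained sketch of that lifecycle with placeholder work standing in for the scheduler calls:

    import org.openjdk.jmh.annotations.Benchmark;
    import org.openjdk.jmh.annotations.BenchmarkMode;
    import org.openjdk.jmh.annotations.Level;
    import org.openjdk.jmh.annotations.Mode;
    import org.openjdk.jmh.annotations.Scope;
    import org.openjdk.jmh.annotations.Setup;
    import org.openjdk.jmh.annotations.State;
    import org.openjdk.jmh.annotations.TearDown;

    @State(Scope.Thread)
    public class LifecycleSketch {
        private long[] fixture;

        @Setup(Level.Trial) // runs once per trial, outside the measurement
        public void setup() {
            fixture = new long[1_000_000];
        }

        @Benchmark
        @BenchmarkMode(Mode.SingleShotTime) // one cold-start measurement per iteration
        public long measureOnce() {
            long sum = 0;
            for (long v : fixture) {
                sum += v;
            }
            return sum; // returned values are consumed by JMH, preventing dead-code elimination
        }

        @TearDown(Level.Trial)
        public void teardown() {
            fixture = null;
        }
    }
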
diff --git a/src/main/java/org/apache/flink/scheduler/benchmark/deploying/DeployingTasksInStreamingJobBenchmarkExecutor.java b/src/main/java/org/apache/flink/scheduler/benchmark/deploying/DeployingTasksInStreamingJobBenchmarkExecutor.java
index bbabaf4..2115330 100644
--- a/src/main/java/org/apache/flink/scheduler/benchmark/deploying/DeployingTasksInStreamingJobBenchmarkExecutor.java
+++ b/src/main/java/org/apache/flink/scheduler/benchmark/deploying/DeployingTasksInStreamingJobBenchmarkExecutor.java
@@ -33,34 +33,34 @@ import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.runner.RunnerException;
 
 /**
- * The benchmark of deploying tasks in a STREAMING job.
- * The related method is {@link Execution#deploy}.
+ * The benchmark of deploying tasks in a STREAMING job. The related method is {@link
+ * Execution#deploy}.
  */
 public class DeployingTasksInStreamingJobBenchmarkExecutor extends SchedulerBenchmarkExecutorBase {
 
-	@Param("STREAMING")
-	private JobConfiguration jobConfiguration;
+    @Param("STREAMING")
+    private JobConfiguration jobConfiguration;
 
-	private DeployingTasksInStreamingJobBenchmark benchmark;
+    private DeployingTasksInStreamingJobBenchmark benchmark;
 
-	public static void main(String[] args) throws RunnerException {
-		runBenchmark(DeployingTasksInStreamingJobBenchmark.class);
-	}
+    public static void main(String[] args) throws RunnerException {
+        runBenchmark(DeployingTasksInStreamingJobBenchmarkExecutor.class);
+    }
 
-	@Setup(Level.Trial)
-	public void setup() throws Exception {
-		benchmark = new DeployingTasksInStreamingJobBenchmark();
-		benchmark.setup(jobConfiguration);
-	}
+    @Setup(Level.Trial)
+    public void setup() throws Exception {
+        benchmark = new DeployingTasksInStreamingJobBenchmark();
+        benchmark.setup(jobConfiguration);
+    }
 
-	@Benchmark
-	@BenchmarkMode(Mode.SingleShotTime)
-	public void deployAllTasks() throws Exception {
-		benchmark.deployAllTasks();
-	}
+    @Benchmark
+    @BenchmarkMode(Mode.SingleShotTime)
+    public void deployAllTasks() throws Exception {
+        benchmark.deployAllTasks();
+    }
 
-	@TearDown(Level.Trial)
-	public void teardown() {
-		benchmark.teardown();
-	}
+    @TearDown(Level.Trial)
+    public void teardown() {
+        benchmark.teardown();
+    }
 }
diff --git a/src/main/java/org/apache/flink/scheduler/benchmark/e2e/CreateSchedulerBenchmarkExecutor.java b/src/main/java/org/apache/flink/scheduler/benchmark/e2e/CreateSchedulerBenchmarkExecutor.java
index b14a6df..bfb7c5e 100644
--- a/src/main/java/org/apache/flink/scheduler/benchmark/e2e/CreateSchedulerBenchmarkExecutor.java
+++ b/src/main/java/org/apache/flink/scheduler/benchmark/e2e/CreateSchedulerBenchmarkExecutor.java
@@ -32,34 +32,32 @@ import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.infra.Blackhole;
 import org.openjdk.jmh.runner.RunnerException;
 
-/**
- * The benchmark of creating the scheduler in a STREAMING/BATCH job.
- */
+/** The benchmark of creating the scheduler in a STREAMING/BATCH job. */
 public class CreateSchedulerBenchmarkExecutor extends SchedulerBenchmarkExecutorBase {
 
-	@Param({"BATCH", "STREAMING"})
-	private JobConfiguration jobConfiguration;
+    @Param({"BATCH", "STREAMING"})
+    private JobConfiguration jobConfiguration;
 
-	private CreateSchedulerBenchmark benchmark;
+    private CreateSchedulerBenchmark benchmark;
 
-	public static void main(String[] args) throws RunnerException {
-		runBenchmark(CreateSchedulerBenchmarkExecutor.class);
-	}
+    public static void main(String[] args) throws RunnerException {
+        runBenchmark(CreateSchedulerBenchmarkExecutor.class);
+    }
 
-	@Setup(Level.Trial)
-	public void setup() throws Exception {
-		benchmark = new CreateSchedulerBenchmark();
-		benchmark.setup(jobConfiguration);
-	}
+    @Setup(Level.Trial)
+    public void setup() throws Exception {
+        benchmark = new CreateSchedulerBenchmark();
+        benchmark.setup(jobConfiguration);
+    }
 
-	@Benchmark
-	@BenchmarkMode(Mode.SingleShotTime)
-	public void createScheduler(Blackhole blackhole) throws Exception {
-		blackhole.consume(benchmark.createScheduler());
-	}
+    @Benchmark
+    @BenchmarkMode(Mode.SingleShotTime)
+    public void createScheduler(Blackhole blackhole) throws Exception {
+        blackhole.consume(benchmark.createScheduler());
+    }
 
-	@TearDown(Level.Trial)
-	public void teardown() {
-		benchmark.teardown();
-	}
+    @TearDown(Level.Trial)
+    public void teardown() {
+        benchmark.teardown();
+    }
 }
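
Since createScheduler returns the scheduler it builds, handing the result to Blackhole.consume keeps the JIT from eliminating the construction as dead code. A minimal sketch of the two equivalent ways JMH keeps a result alive:

    import java.util.Random;
    import org.openjdk.jmh.annotations.Benchmark;
    import org.openjdk.jmh.annotations.Scope;
    import org.openjdk.jmh.annotations.State;
    import org.openjdk.jmh.infra.Blackhole;

    @State(Scope.Thread)
    public class BlackholeSketch {
        private final Random random = new Random(42);

        @Benchmark
        public void consumed(Blackhole blackhole) {
            // Explicitly "use" the result so the JIT cannot eliminate the computation.
            blackhole.consume(random.nextLong() * 31);
        }

        @Benchmark
        public long returned() {
            // Equivalent alternative: JMH implicitly consumes returned values.
            return random.nextLong() * 31;
        }
    }
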
diff --git a/src/main/java/org/apache/flink/scheduler/benchmark/e2e/SchedulingAndDeployingBenchmarkExecutor.java b/src/main/java/org/apache/flink/scheduler/benchmark/e2e/SchedulingAndDeployingBenchmarkExecutor.java
index 153c3d8..9bffc66 100644
--- a/src/main/java/org/apache/flink/scheduler/benchmark/e2e/SchedulingAndDeployingBenchmarkExecutor.java
+++ b/src/main/java/org/apache/flink/scheduler/benchmark/e2e/SchedulingAndDeployingBenchmarkExecutor.java
@@ -31,34 +31,32 @@ import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.runner.RunnerException;
 
-/**
- * The benchmark of scheduling and deploying tasks in a STREAMING/BATCH job.
- */
+/** The benchmark of scheduling and deploying tasks in a STREAMING/BATCH job. */
 public class SchedulingAndDeployingBenchmarkExecutor extends SchedulerBenchmarkExecutorBase {
 
-	@Param({"BATCH", "STREAMING"})
-	private JobConfiguration jobConfiguration;
+    @Param({"BATCH", "STREAMING"})
+    private JobConfiguration jobConfiguration;
 
-	private SchedulingAndDeployingBenchmark benchmark;
+    private SchedulingAndDeployingBenchmark benchmark;
 
-	public static void main(String[] args) throws RunnerException {
-		runBenchmark(SchedulingAndDeployingBenchmarkExecutor.class);
-	}
+    public static void main(String[] args) throws RunnerException {
+        runBenchmark(SchedulingAndDeployingBenchmarkExecutor.class);
+    }
 
-	@Setup(Level.Trial)
-	public void setup() throws Exception {
-		benchmark = new SchedulingAndDeployingBenchmark();
-		benchmark.setup(jobConfiguration);
-	}
+    @Setup(Level.Trial)
+    public void setup() throws Exception {
+        benchmark = new SchedulingAndDeployingBenchmark();
+        benchmark.setup(jobConfiguration);
+    }
 
-	@Benchmark
-	@BenchmarkMode(Mode.SingleShotTime)
-	public void startScheduling() throws Exception {
-		benchmark.startScheduling();
-	}
+    @Benchmark
+    @BenchmarkMode(Mode.SingleShotTime)
+    public void startScheduling() throws Exception {
+        benchmark.startScheduling();
+    }
 
-	@TearDown(Level.Trial)
-	public void teardown() {
-		benchmark.teardown();
-	}
+    @TearDown(Level.Trial)
+    public void teardown() {
+        benchmark.teardown();
+    }
 }
diff --git a/src/main/java/org/apache/flink/scheduler/benchmark/failover/RegionToRestartInBatchJobBenchmarkExecutor.java b/src/main/java/org/apache/flink/scheduler/benchmark/failover/RegionToRestartInBatchJobBenchmarkExecutor.java
index c8504b9..60920cb 100644
--- a/src/main/java/org/apache/flink/scheduler/benchmark/failover/RegionToRestartInBatchJobBenchmarkExecutor.java
+++ b/src/main/java/org/apache/flink/scheduler/benchmark/failover/RegionToRestartInBatchJobBenchmarkExecutor.java
@@ -32,34 +32,32 @@ import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.infra.Blackhole;
 import org.openjdk.jmh.runner.RunnerException;
 
-/**
- * The benchmark of calculating the regions to restart when failover occurs in a BATCH job.
- */
+/** The benchmark of calculating the regions to restart when failover occurs in a BATCH job. */
 public class RegionToRestartInBatchJobBenchmarkExecutor extends SchedulerBenchmarkExecutorBase {
 
-	@Param("BATCH")
-	private JobConfiguration jobConfiguration;
+    @Param("BATCH")
+    private JobConfiguration jobConfiguration;
 
-	private RegionToRestartInBatchJobBenchmark benchmark;
+    private RegionToRestartInBatchJobBenchmark benchmark;
 
-	public static void main(String[] args) throws RunnerException {
-		runBenchmark(RegionToRestartInBatchJobBenchmarkExecutor.class);
-	}
+    public static void main(String[] args) throws RunnerException {
+        runBenchmark(RegionToRestartInBatchJobBenchmarkExecutor.class);
+    }
 
-	@Setup(Level.Trial)
-	public void setup() throws Exception {
-		benchmark = new RegionToRestartInBatchJobBenchmark();
-		benchmark.setup(jobConfiguration);
-	}
+    @Setup(Level.Trial)
+    public void setup() throws Exception {
+        benchmark = new RegionToRestartInBatchJobBenchmark();
+        benchmark.setup(jobConfiguration);
+    }
 
-	@Benchmark
-	@BenchmarkMode(Mode.SingleShotTime)
-	public void calculateRegionToRestart(Blackhole blackhole) {
-		blackhole.consume(benchmark.calculateRegionToRestart());
-	}
+    @Benchmark
+    @BenchmarkMode(Mode.SingleShotTime)
+    public void calculateRegionToRestart(Blackhole blackhole) {
+        blackhole.consume(benchmark.calculateRegionToRestart());
+    }
 
-	@TearDown(Level.Trial)
-	public void teardown() {
-		benchmark.teardown();
-	}
+    @TearDown(Level.Trial)
+    public void teardown() {
+        benchmark.teardown();
+    }
 }
diff --git a/src/main/java/org/apache/flink/scheduler/benchmark/failover/RegionToRestartInStreamingJobBenchmarkExecutor.java b/src/main/java/org/apache/flink/scheduler/benchmark/failover/RegionToRestartInStreamingJobBenchmarkExecutor.java
index 2b1543e..5512948 100644
--- a/src/main/java/org/apache/flink/scheduler/benchmark/failover/RegionToRestartInStreamingJobBenchmarkExecutor.java
+++ b/src/main/java/org/apache/flink/scheduler/benchmark/failover/RegionToRestartInStreamingJobBenchmarkExecutor.java
@@ -32,34 +32,32 @@ import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.infra.Blackhole;
 import org.openjdk.jmh.runner.RunnerException;
 
-/**
- * The benchmark of calculating region to restart when failover occurs in a STREAMING job.
- */
+/** The benchmark of calculating the regions to restart when failover occurs in a STREAMING job. */
 public class RegionToRestartInStreamingJobBenchmarkExecutor extends SchedulerBenchmarkExecutorBase {
 
-	@Param("STREAMING")
-	private JobConfiguration jobConfiguration;
+    @Param("STREAMING")
+    private JobConfiguration jobConfiguration;
 
-	private RegionToRestartInStreamingJobBenchmark benchmark;
+    private RegionToRestartInStreamingJobBenchmark benchmark;
 
-	public static void main(String[] args) throws RunnerException {
-		runBenchmark(RegionToRestartInStreamingJobBenchmarkExecutor.class);
-	}
+    public static void main(String[] args) throws RunnerException {
+        runBenchmark(RegionToRestartInStreamingJobBenchmarkExecutor.class);
+    }
 
-	@Setup(Level.Trial)
-	public void setup() throws Exception {
-		benchmark = new RegionToRestartInStreamingJobBenchmark();
-		benchmark.setup(jobConfiguration);
-	}
+    @Setup(Level.Trial)
+    public void setup() throws Exception {
+        benchmark = new RegionToRestartInStreamingJobBenchmark();
+        benchmark.setup(jobConfiguration);
+    }
 
-	@Benchmark
-	@BenchmarkMode(Mode.SingleShotTime)
-	public void calculateRegionToRestart(Blackhole blackhole) {
-		blackhole.consume(benchmark.calculateRegionToRestart());
-	}
+    @Benchmark
+    @BenchmarkMode(Mode.SingleShotTime)
+    public void calculateRegionToRestart(Blackhole blackhole) {
+        blackhole.consume(benchmark.calculateRegionToRestart());
+    }
 
-	@TearDown(Level.Trial)
-	public void teardown() {
-		benchmark.teardown();
-	}
+    @TearDown(Level.Trial)
+    public void teardown() {
+        benchmark.teardown();
+    }
 }
diff --git a/src/main/java/org/apache/flink/scheduler/benchmark/partitionrelease/PartitionReleaseInBatchJobBenchmarkExecutor.java b/src/main/java/org/apache/flink/scheduler/benchmark/partitionrelease/PartitionReleaseInBatchJobBenchmarkExecutor.java
index af4cefa..21c3eb8 100644
--- a/src/main/java/org/apache/flink/scheduler/benchmark/partitionrelease/PartitionReleaseInBatchJobBenchmarkExecutor.java
+++ b/src/main/java/org/apache/flink/scheduler/benchmark/partitionrelease/PartitionReleaseInBatchJobBenchmarkExecutor.java
@@ -31,34 +31,32 @@ import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.runner.RunnerException;
 
-/**
- * The benchmark of releasing partitions in a BATCH job.
- */
+/** The benchmark of releasing partitions in a BATCH job. */
 public class PartitionReleaseInBatchJobBenchmarkExecutor extends SchedulerBenchmarkExecutorBase {
 
-	@Param("BATCH")
-	private JobConfiguration jobConfiguration;
+    @Param("BATCH")
+    private JobConfiguration jobConfiguration;
 
-	private PartitionReleaseInBatchJobBenchmark benchmark;
+    private PartitionReleaseInBatchJobBenchmark benchmark;
 
-	public static void main(String[] args) throws RunnerException {
-		runBenchmark(PartitionReleaseInBatchJobBenchmarkExecutor.class);
-	}
+    public static void main(String[] args) throws RunnerException {
+        runBenchmark(PartitionReleaseInBatchJobBenchmarkExecutor.class);
+    }
 
-	@Setup(Level.Trial)
-	public void setup() throws Exception {
-		benchmark = new PartitionReleaseInBatchJobBenchmark();
-		benchmark.setup(jobConfiguration);
-	}
+    @Setup(Level.Trial)
+    public void setup() throws Exception {
+        benchmark = new PartitionReleaseInBatchJobBenchmark();
+        benchmark.setup(jobConfiguration);
+    }
 
-	@Benchmark
-	@BenchmarkMode(Mode.SingleShotTime)
-	public void partitionRelease() {
-		benchmark.partitionRelease();
-	}
+    @Benchmark
+    @BenchmarkMode(Mode.SingleShotTime)
+    public void partitionRelease() {
+        benchmark.partitionRelease();
+    }
 
-	@TearDown(Level.Trial)
-	public void teardown() {
-		benchmark.teardown();
-	}
+    @TearDown(Level.Trial)
+    public void teardown() {
+        benchmark.teardown();
+    }
 }
diff --git a/src/main/java/org/apache/flink/scheduler/benchmark/scheduling/InitSchedulingStrategyBenchmarkExecutor.java b/src/main/java/org/apache/flink/scheduler/benchmark/scheduling/InitSchedulingStrategyBenchmarkExecutor.java
index 2674a0b..db04a9c 100644
--- a/src/main/java/org/apache/flink/scheduler/benchmark/scheduling/InitSchedulingStrategyBenchmarkExecutor.java
+++ b/src/main/java/org/apache/flink/scheduler/benchmark/scheduling/InitSchedulingStrategyBenchmarkExecutor.java
@@ -32,34 +32,32 @@ import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.infra.Blackhole;
 import org.openjdk.jmh.runner.RunnerException;
 
-/**
- * The benchmark of initializing the scheduling strategy in a STREAMING/BATCH job.
- */
+/** The benchmark of initializing the scheduling strategy in a STREAMING/BATCH job. */
 public class InitSchedulingStrategyBenchmarkExecutor extends SchedulerBenchmarkExecutorBase {
 
-	@Param({"BATCH", "STREAMING"})
-	private JobConfiguration jobConfiguration;
+    @Param({"BATCH", "STREAMING"})
+    private JobConfiguration jobConfiguration;
 
-	private InitSchedulingStrategyBenchmark benchmark;
+    private InitSchedulingStrategyBenchmark benchmark;
 
-	public static void main(String[] args) throws RunnerException {
-		runBenchmark(InitSchedulingStrategyBenchmarkExecutor.class);
-	}
+    public static void main(String[] args) throws RunnerException {
+        runBenchmark(InitSchedulingStrategyBenchmarkExecutor.class);
+    }
 
-	@Setup(Level.Trial)
-	public void setup() throws Exception {
-		benchmark = new InitSchedulingStrategyBenchmark();
-		benchmark.setup(jobConfiguration);
-	}
+    @Setup(Level.Trial)
+    public void setup() throws Exception {
+        benchmark = new InitSchedulingStrategyBenchmark();
+        benchmark.setup(jobConfiguration);
+    }
 
-	@Benchmark
-	@BenchmarkMode(Mode.SingleShotTime)
-	public void initSchedulingStrategy(Blackhole blackhole) {
-		blackhole.consume(benchmark.initSchedulingStrategy());
-	}
+    @Benchmark
+    @BenchmarkMode(Mode.SingleShotTime)
+    public void initSchedulingStrategy(Blackhole blackhole) {
+        blackhole.consume(benchmark.initSchedulingStrategy());
+    }
 
-	@TearDown(Level.Trial)
-	public void teardown() {
-		benchmark.teardown();
-	}
+    @TearDown(Level.Trial)
+    public void teardown() {
+        benchmark.teardown();
+    }
 }
diff --git a/src/main/java/org/apache/flink/scheduler/benchmark/scheduling/SchedulingDownstreamTasksInBatchJobBenchmarkExecutor.java b/src/main/java/org/apache/flink/scheduler/benchmark/scheduling/SchedulingDownstreamTasksInBatchJobBenchmarkExecutor.java
index 586ec6a..7b05575 100644
--- a/src/main/java/org/apache/flink/scheduler/benchmark/scheduling/SchedulingDownstreamTasksInBatchJobBenchmarkExecutor.java
+++ b/src/main/java/org/apache/flink/scheduler/benchmark/scheduling/SchedulingDownstreamTasksInBatchJobBenchmarkExecutor.java
@@ -31,34 +31,33 @@ import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.runner.RunnerException;
 
-/**
- * The benchmark of scheduling downstream task in a BATCH job.
- */
-public class SchedulingDownstreamTasksInBatchJobBenchmarkExecutor extends SchedulerBenchmarkExecutorBase {
-
-	@Param({"BATCH"})
-	private JobConfiguration jobConfiguration;
-
-	private SchedulingDownstreamTasksInBatchJobBenchmark benchmark;
-
-	public static void main(String[] args) throws RunnerException {
-		runBenchmark(SchedulingDownstreamTasksInBatchJobBenchmarkExecutor.class);
-	}
-
-	@Setup(Level.Trial)
-	public void setup() throws Exception {
-		benchmark = new SchedulingDownstreamTasksInBatchJobBenchmark();
-		benchmark.setup(jobConfiguration);
-	}
-
-	@Benchmark
-	@BenchmarkMode(Mode.SingleShotTime)
-	public void schedulingDownstreamTasks() {
-		benchmark.schedulingDownstreamTasks();
-	}
-
-	@TearDown(Level.Trial)
-	public void teardown() {
-		benchmark.teardown();
-	}
+/** The benchmark of scheduling downstream tasks in a BATCH job. */
+public class SchedulingDownstreamTasksInBatchJobBenchmarkExecutor
+        extends SchedulerBenchmarkExecutorBase {
+
+    @Param({"BATCH"})
+    private JobConfiguration jobConfiguration;
+
+    private SchedulingDownstreamTasksInBatchJobBenchmark benchmark;
+
+    public static void main(String[] args) throws RunnerException {
+        runBenchmark(SchedulingDownstreamTasksInBatchJobBenchmarkExecutor.class);
+    }
+
+    @Setup(Level.Trial)
+    public void setup() throws Exception {
+        benchmark = new SchedulingDownstreamTasksInBatchJobBenchmark();
+        benchmark.setup(jobConfiguration);
+    }
+
+    @Benchmark
+    @BenchmarkMode(Mode.SingleShotTime)
+    public void schedulingDownstreamTasks() {
+        benchmark.schedulingDownstreamTasks();
+    }
+
+    @TearDown(Level.Trial)
+    public void teardown() {
+        benchmark.teardown();
+    }
 }
diff --git a/src/main/java/org/apache/flink/scheduler/benchmark/topology/BuildExecutionGraphBenchmarkExecutor.java b/src/main/java/org/apache/flink/scheduler/benchmark/topology/BuildExecutionGraphBenchmarkExecutor.java
index d5f8496..931a43e 100644
--- a/src/main/java/org/apache/flink/scheduler/benchmark/topology/BuildExecutionGraphBenchmarkExecutor.java
+++ b/src/main/java/org/apache/flink/scheduler/benchmark/topology/BuildExecutionGraphBenchmarkExecutor.java
@@ -31,34 +31,32 @@ import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.runner.RunnerException;
 
-/**
- * The benchmark of building the topology of ExecutionGraph in a STREAMING/BATCH job.
- */
+/** The benchmark of building the topology of ExecutionGraph in a STREAMING/BATCH job. */
 public class BuildExecutionGraphBenchmarkExecutor extends SchedulerBenchmarkExecutorBase {
 
-	@Param({"BATCH", "STREAMING"})
-	private JobConfiguration jobConfiguration;
+    @Param({"BATCH", "STREAMING"})
+    private JobConfiguration jobConfiguration;
 
-	private BuildExecutionGraphBenchmark benchmark;
+    private BuildExecutionGraphBenchmark benchmark;
 
-	public static void main(String[] args) throws RunnerException {
-		runBenchmark(BuildExecutionGraphBenchmarkExecutor.class);
-	}
+    public static void main(String[] args) throws RunnerException {
+        runBenchmark(BuildExecutionGraphBenchmarkExecutor.class);
+    }
 
-	@Setup(Level.Trial)
-	public void setup() throws Exception {
-		benchmark = new BuildExecutionGraphBenchmark();
-		benchmark.setup(jobConfiguration);
-	}
+    @Setup(Level.Trial)
+    public void setup() throws Exception {
+        benchmark = new BuildExecutionGraphBenchmark();
+        benchmark.setup(jobConfiguration);
+    }
 
-	@Benchmark
-	@BenchmarkMode(Mode.SingleShotTime)
-	public void buildTopology() throws Exception {
-		benchmark.buildTopology();
-	}
+    @Benchmark
+    @BenchmarkMode(Mode.SingleShotTime)
+    public void buildTopology() throws Exception {
+        benchmark.buildTopology();
+    }
 
-	@TearDown(Level.Trial)
-	public void teardown() {
-		benchmark.teardown();
-	}
+    @TearDown(Level.Trial)
+    public void teardown() {
+        benchmark.teardown();
+    }
 }
diff --git a/src/main/java/org/apache/flink/state/benchmark/ListStateBenchmark.java b/src/main/java/org/apache/flink/state/benchmark/ListStateBenchmark.java
index d81e16e..0690eb2 100644
--- a/src/main/java/org/apache/flink/state/benchmark/ListStateBenchmark.java
+++ b/src/main/java/org/apache/flink/state/benchmark/ListStateBenchmark.java
@@ -44,20 +44,20 @@ import static org.apache.flink.contrib.streaming.state.benchmark.StateBackendBen
 import static org.apache.flink.state.benchmark.StateBenchmarkConstants.listValueCount;
 import static org.apache.flink.state.benchmark.StateBenchmarkConstants.setupKeyCount;
 
-/**
- * Implementation for list state benchmark testing.
- */
+/** Implementation for list state benchmark testing. */
 public class ListStateBenchmark extends StateBenchmarkBase {
     private final String STATE_NAME = "listState";
-    private final ListStateDescriptor<Long> STATE_DESC = new ListStateDescriptor<>(STATE_NAME, Long.class);
+    private final ListStateDescriptor<Long> STATE_DESC =
+            new ListStateDescriptor<>(STATE_NAME, Long.class);
     private ListState<Long> listState;
     private List<Long> dummyLists;
 
     public static void main(String[] args) throws RunnerException {
-        Options opt = new OptionsBuilder()
-                .verbosity(VerboseMode.NORMAL)
-                .include(".*" + ListStateBenchmark.class.getCanonicalName() + ".*")
-                .build();
+        Options opt =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(".*" + ListStateBenchmark.class.getCanonicalName() + ".*")
+                        .build();
 
         new Runner(opt).run();
     }
@@ -83,7 +83,8 @@ public class ListStateBenchmark extends StateBenchmarkBase {
         // to prevent the spike caused by different key distribution in multiple sst files,
         // the more access to the older sst file, the lower throughput will be.
         if (keyedStateBackend instanceof RocksDBKeyedStateBackend) {
-            RocksDBKeyedStateBackend<Long> rocksDBKeyedStateBackend = (RocksDBKeyedStateBackend<Long>) keyedStateBackend;
+            RocksDBKeyedStateBackend<Long> rocksDBKeyedStateBackend =
+                    (RocksDBKeyedStateBackend<Long>) keyedStateBackend;
             compactState(rocksDBKeyedStateBackend, STATE_DESC);
         }
     }
@@ -99,7 +100,8 @@ public class ListStateBenchmark extends StateBenchmarkBase {
                 });
         // make the clearance effective, trigger compaction for RocksDB, and GC for heap.
         if (keyedStateBackend instanceof RocksDBKeyedStateBackend) {
-            RocksDBKeyedStateBackend<Long> rocksDBKeyedStateBackend = (RocksDBKeyedStateBackend<Long>) keyedStateBackend;
+            RocksDBKeyedStateBackend<Long> rocksDBKeyedStateBackend =
+                    (RocksDBKeyedStateBackend<Long>) keyedStateBackend;
             compactState(rocksDBKeyedStateBackend, STATE_DESC);
         } else {
             System.gc();
@@ -146,4 +148,4 @@ public class ListStateBenchmark extends StateBenchmarkBase {
         keyedStateBackend.setCurrentKey(keyValue.setUpKey);
         listState.addAll(dummyLists);
     }
-}
\ No newline at end of file
+}
diff --git a/src/main/java/org/apache/flink/state/benchmark/MapStateBenchmark.java b/src/main/java/org/apache/flink/state/benchmark/MapStateBenchmark.java
index 3c9e928..52494c0 100644
--- a/src/main/java/org/apache/flink/state/benchmark/MapStateBenchmark.java
+++ b/src/main/java/org/apache/flink/state/benchmark/MapStateBenchmark.java
@@ -42,18 +42,17 @@ import static org.apache.flink.state.benchmark.StateBenchmarkConstants.mapKeyCou
 import static org.apache.flink.state.benchmark.StateBenchmarkConstants.mapKeys;
 import static org.apache.flink.state.benchmark.StateBenchmarkConstants.setupKeyCount;
 
-/**
- * Implementation for map state benchmark testing.
- */
+/** Implementation for map state benchmark testing. */
 public class MapStateBenchmark extends StateBenchmarkBase {
     private MapState<Long, Double> mapState;
     private Map<Long, Double> dummyMaps;
 
     public static void main(String[] args) throws RunnerException {
-        Options opt = new OptionsBuilder()
-                .verbosity(VerboseMode.NORMAL)
-                .include(".*" + MapStateBenchmark.class.getCanonicalName() + ".*")
-                .build();
+        Options opt =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(".*" + MapStateBenchmark.class.getCanonicalName() + ".*")
+                        .build();
 
         new Runner(opt).run();
     }
@@ -61,9 +60,10 @@ public class MapStateBenchmark extends StateBenchmarkBase {
     @Setup
     public void setUp() throws Exception {
         keyedStateBackend = createKeyedStateBackend(backendType);
-        mapState = getMapState(
-                keyedStateBackend,
-                new MapStateDescriptor<>("mapState", Long.class, Double.class));
+        mapState =
+                getMapState(
+                        keyedStateBackend,
+                        new MapStateDescriptor<>("mapState", Long.class, Double.class));
         dummyMaps = new HashMap<>(mapKeyCount);
         for (int i = 0; i < mapKeyCount; ++i) {
             dummyMaps.put(mapKeys.get(i), random.nextDouble());
diff --git a/src/main/java/org/apache/flink/state/benchmark/StateBenchmarkBase.java b/src/main/java/org/apache/flink/state/benchmark/StateBenchmarkBase.java
index 31199dc..c053f29 100644
--- a/src/main/java/org/apache/flink/state/benchmark/StateBenchmarkBase.java
+++ b/src/main/java/org/apache/flink/state/benchmark/StateBenchmarkBase.java
@@ -47,20 +47,14 @@ import static org.apache.flink.state.benchmark.StateBenchmarkConstants.setupKeys
 
 /** Base implementation of the state benchmarks. */
 public class StateBenchmarkBase extends BenchmarkBase {
-    KeyedStateBackend<Long> keyedStateBackend;
+    // TODO: why AtomicInteger?
+    static AtomicInteger keyIndex;
+    final ThreadLocalRandom random = ThreadLocalRandom.current();
 
     @Param({"HEAP", "ROCKSDB"})
     protected StateBackendBenchmarkUtils.StateBackendType backendType;
 
-    final ThreadLocalRandom random = ThreadLocalRandom.current();
-
-    @TearDown
-    public void tearDown() throws IOException {
-        cleanUp(keyedStateBackend);
-    }
-
-    // TODO: why AtomicInteger?
-    static AtomicInteger keyIndex;
+    KeyedStateBackend<Long> keyedStateBackend;
 
     private static int getCurrentIndex() {
         int currentIndex = keyIndex.getAndIncrement();
@@ -70,8 +64,20 @@ public class StateBenchmarkBase extends BenchmarkBase {
         return currentIndex;
     }
 
+    @TearDown
+    public void tearDown() throws IOException {
+        cleanUp(keyedStateBackend);
+    }
+
     @State(Scope.Thread)
     public static class KeyValue {
+        long newKey;
+        long setUpKey;
+        long mapKey;
+        double mapValue;
+        long value;
+        List<Long> listValue;
+
         @Setup(Level.Invocation)
         public void kvSetup() {
             int currentIndex = getCurrentIndex();
@@ -91,12 +97,5 @@ public class StateBenchmarkBase extends BenchmarkBase {
         public void kvTearDown() {
             listValue = null;
         }
-
-        long newKey;
-        long setUpKey;
-        long mapKey;
-        double mapValue;
-        long value;
-        List<Long> listValue;
     }
 }
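
The relocated TODO above still asks why keyIndex is an AtomicInteger; the likely reason is that JMH may run the benchmark with several threads sharing one cursor over the pre-generated key lists, so the increment has to be atomic. A self-contained sketch of that pattern (the pool size and the reset rule are assumptions, since the middle of getCurrentIndex is elided in this hunk):

    import java.util.concurrent.atomic.AtomicInteger;

    /** Sketch: a shared, thread-safe cursor over a fixed pool of pre-generated keys. */
    class RoundRobinIndex {
        private static final int POOL_SIZE = 500_000; // assumption: stands in for setupKeyCount
        private static final AtomicInteger keyIndex = new AtomicInteger();

        /** A plain field++ would race when JMH runs several benchmark threads. */
        static int next() {
            int current = keyIndex.getAndIncrement();
            if (current == Integer.MAX_VALUE) {
                keyIndex.set(0); // assumption: periodic reset, the real guard is elided above
            }
            return Math.floorMod(current, POOL_SIZE); // floorMod stays non-negative on wraparound
        }
    }
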
diff --git a/src/main/java/org/apache/flink/state/benchmark/StateBenchmarkConstants.java b/src/main/java/org/apache/flink/state/benchmark/StateBenchmarkConstants.java
index 890ec7d..1bb9eed 100644
--- a/src/main/java/org/apache/flink/state/benchmark/StateBenchmarkConstants.java
+++ b/src/main/java/org/apache/flink/state/benchmark/StateBenchmarkConstants.java
@@ -36,6 +36,12 @@ class StateBenchmarkConstants {
     static final String dbDirName = "dbPath";
 
     static final ArrayList<Long> mapKeys = new ArrayList<>(mapKeyCount);
+    static final ArrayList<Double> mapValues = new ArrayList<>(mapKeyCount);
+    static final ArrayList<Long> setupKeys = new ArrayList<>(setupKeyCount);
+    static final int newKeyCount = 500_000;
+    static final ArrayList<Long> newKeys = new ArrayList<>(newKeyCount);
+    static final int randomValueCount = 1_000_000;
+    static final ArrayList<Long> randomValues = new ArrayList<>(randomValueCount);
 
     static {
         for (int i = 0; i < mapKeyCount; i++) {
@@ -44,8 +50,6 @@ class StateBenchmarkConstants {
         Collections.shuffle(mapKeys);
     }
 
-    static final ArrayList<Double> mapValues = new ArrayList<>(mapKeyCount);
-
     static {
         Random random = new Random();
         for (int i = 0; i < mapKeyCount; i++) {
@@ -54,8 +58,6 @@ class StateBenchmarkConstants {
         Collections.shuffle(mapValues);
     }
 
-    static final ArrayList<Long> setupKeys = new ArrayList<>(setupKeyCount);
-
     static {
         for (long i = 0; i < setupKeyCount; i++) {
             setupKeys.add(i);
@@ -63,9 +65,6 @@ class StateBenchmarkConstants {
         Collections.shuffle(setupKeys);
     }
 
-    static final int newKeyCount = 500_000;
-    static final ArrayList<Long> newKeys = new ArrayList<>(newKeyCount);
-
     static {
         for (long i = 0; i < newKeyCount; i++) {
             newKeys.add(i + setupKeyCount);
@@ -73,9 +72,6 @@ class StateBenchmarkConstants {
         Collections.shuffle(newKeys);
     }
 
-    static final int randomValueCount = 1_000_000;
-    static final ArrayList<Long> randomValues = new ArrayList<>(randomValueCount);
-
     static {
         for (long i = 0; i < randomValueCount; i++) {
             randomValues.add(i);
diff --git a/src/main/java/org/apache/flink/state/benchmark/ValueStateBenchmark.java b/src/main/java/org/apache/flink/state/benchmark/ValueStateBenchmark.java
index 4191619..92350d0 100644
--- a/src/main/java/org/apache/flink/state/benchmark/ValueStateBenchmark.java
+++ b/src/main/java/org/apache/flink/state/benchmark/ValueStateBenchmark.java
@@ -36,17 +36,16 @@ import static org.apache.flink.contrib.streaming.state.benchmark.StateBackendBen
 import static org.apache.flink.contrib.streaming.state.benchmark.StateBackendBenchmarkUtils.getValueState;
 import static org.apache.flink.state.benchmark.StateBenchmarkConstants.setupKeyCount;
 
-/**
- * Implementation for listValue state benchmark testing.
- */
+/** Implementation for value state benchmark testing. */
 public class ValueStateBenchmark extends StateBenchmarkBase {
     private ValueState<Long> valueState;
 
     public static void main(String[] args) throws RunnerException {
-        Options opt = new OptionsBuilder()
-                .verbosity(VerboseMode.NORMAL)
-                .include(".*" + ValueStateBenchmark.class.getCanonicalName() + ".*")
-                .build();
+        Options opt =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(".*" + ValueStateBenchmark.class.getCanonicalName() + ".*")
+                        .build();
 
         new Runner(opt).run();
     }
@@ -54,9 +53,8 @@ public class ValueStateBenchmark extends StateBenchmarkBase {
     @Setup
     public void setUp() throws Exception {
         keyedStateBackend = createKeyedStateBackend(backendType);
-        valueState = getValueState(
-                keyedStateBackend,
-                new ValueStateDescriptor<>("kvState", Long.class));
+        valueState =
+                getValueState(keyedStateBackend, new ValueStateDescriptor<>("kvState", Long.class));
         for (int i = 0; i < setupKeyCount; ++i) {
             keyedStateBackend.setCurrentKey((long) i);
             valueState.update(random.nextLong());
diff --git a/src/test/java/org/apache/flink/benchmark/DataSkewStreamNetworkThroughputBenchmarkExecutor.java b/src/test/java/org/apache/flink/benchmark/DataSkewStreamNetworkThroughputBenchmarkExecutor.java
index 5da9040..30ccb1c 100644
--- a/src/test/java/org/apache/flink/benchmark/DataSkewStreamNetworkThroughputBenchmarkExecutor.java
+++ b/src/test/java/org/apache/flink/benchmark/DataSkewStreamNetworkThroughputBenchmarkExecutor.java
@@ -33,46 +33,47 @@ import org.openjdk.jmh.runner.options.VerboseMode;
 
 import static org.openjdk.jmh.annotations.Scope.Thread;
 
-/**
- * JMH throughput benchmark runner for data skew scenario.
- */
-@OperationsPerInvocation(value = DataSkewStreamNetworkThroughputBenchmarkExecutor.RECORDS_PER_INVOCATION)
+/** JMH throughput benchmark runner for data skew scenario. */
+@OperationsPerInvocation(
+        value = DataSkewStreamNetworkThroughputBenchmarkExecutor.RECORDS_PER_INVOCATION)
 public class DataSkewStreamNetworkThroughputBenchmarkExecutor extends BenchmarkBase {
 
-	static final int RECORDS_PER_INVOCATION = 5_000_000;
+    static final int RECORDS_PER_INVOCATION = 5_000_000;
 
-	public static void main(String[] args)
-			throws RunnerException {
-		Options options = new OptionsBuilder()
-				.verbosity(VerboseMode.NORMAL)
-				.include(".*" + DataSkewStreamNetworkThroughputBenchmarkExecutor.class.getCanonicalName() + ".*")
-				.build();
+    public static void main(String[] args) throws RunnerException {
+        Options options =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(
+                                ".*"
+                                        + DataSkewStreamNetworkThroughputBenchmarkExecutor.class
+                                                .getCanonicalName()
+                                        + ".*")
+                        .build();
 
-		new Runner(options).run();
-	}
+        new Runner(options).run();
+    }
 
-	@Benchmark
-	public void networkSkewedThroughput(MultiEnvironment context) throws Exception {
-		context.executeBenchmark(RECORDS_PER_INVOCATION);
-	}
+    @Benchmark
+    public void networkSkewedThroughput(MultiEnvironment context) throws Exception {
+        context.executeBenchmark(RECORDS_PER_INVOCATION);
+    }
 
-	/**
-	 * Setup for the benchmark(s).
-	 */
-	@State(Thread)
-	public static class MultiEnvironment extends DataSkewStreamNetworkThroughputBenchmark {
-		// 1ms buffer timeout
-		private final int flushTimeout = 1;
+    /** Setup for the benchmark(s). */
+    @State(Thread)
+    public static class MultiEnvironment extends DataSkewStreamNetworkThroughputBenchmark {
+        // 1ms buffer timeout
+        private final int flushTimeout = 1;
 
-		// 1000 num of channels (subpartitions)
-		private final int channels = 1000;
+        // 1000 num of channels (subpartitions)
+        private final int channels = 1000;
 
-		// 10 writer threads, to increase the load on the machine
-		private final int writers = 10;
+        // 10 writer threads, to increase the load on the machine
+        private final int writers = 10;
 
-		@Setup
-		public void setUp() throws Exception {
-			setUp(writers, channels, flushTimeout, false, false, -1, -1, new Configuration());
-		}
-	}
+        @Setup
+        public void setUp() throws Exception {
+            setUp(writers, channels, flushTimeout, false, false, -1, -1, new Configuration());
+        }
+    }
 }
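
Each invocation of networkSkewedThroughput pushes RECORDS_PER_INVOCATION records through the stack, and @OperationsPerInvocation tells JMH to divide the measured time by that count, so scores come out per record rather than per invocation. A minimal sketch of that normalization with placeholder work:

    import org.openjdk.jmh.annotations.Benchmark;
    import org.openjdk.jmh.annotations.OperationsPerInvocation;

    public class PerRecordScoreSketch {
        private static final int RECORDS_PER_INVOCATION = 5_000_000;

        @Benchmark
        @OperationsPerInvocation(RECORDS_PER_INVOCATION)
        public long processManyRecords() {
            // One invocation handles 5M "records"; JMH divides the elapsed time by
            // RECORDS_PER_INVOCATION, so the reported score is per record.
            long acc = 0;
            for (int i = 0; i < RECORDS_PER_INVOCATION; i++) {
                acc += i;
            }
            return acc;
        }
    }
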
diff --git a/src/test/java/org/apache/flink/benchmark/StreamNetworkBroadcastThroughputBenchmarkExecutor.java b/src/test/java/org/apache/flink/benchmark/StreamNetworkBroadcastThroughputBenchmarkExecutor.java
index 0b24888..462da06 100644
--- a/src/test/java/org/apache/flink/benchmark/StreamNetworkBroadcastThroughputBenchmarkExecutor.java
+++ b/src/test/java/org/apache/flink/benchmark/StreamNetworkBroadcastThroughputBenchmarkExecutor.java
@@ -33,43 +33,44 @@ import org.openjdk.jmh.runner.options.VerboseMode;
 
 import static org.openjdk.jmh.annotations.Scope.Thread;
 
-/**
- * JMH throughput benchmark runner.
- */
-@OperationsPerInvocation(value = StreamNetworkBroadcastThroughputBenchmarkExecutor.RECORDS_PER_INVOCATION)
+/** JMH throughput benchmark runner. */
+@OperationsPerInvocation(
+        value = StreamNetworkBroadcastThroughputBenchmarkExecutor.RECORDS_PER_INVOCATION)
 public class StreamNetworkBroadcastThroughputBenchmarkExecutor extends BenchmarkBase {
 
-	static final int RECORDS_PER_INVOCATION = 500_000;
+    static final int RECORDS_PER_INVOCATION = 500_000;
 
-	public static void main(String[] args)
-			throws RunnerException {
-		Options options = new OptionsBuilder()
-				.verbosity(VerboseMode.NORMAL)
-				.include(".*" + StreamNetworkBroadcastThroughputBenchmarkExecutor.class.getCanonicalName() + ".*")
-				.build();
+    public static void main(String[] args) throws RunnerException {
+        Options options =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(
+                                ".*"
+                                        + StreamNetworkBroadcastThroughputBenchmarkExecutor.class
+                                                .getCanonicalName()
+                                        + ".*")
+                        .build();
 
-		new Runner(options).run();
-	}
+        new Runner(options).run();
+    }
 
-	@Benchmark
-	public void networkBroadcastThroughput(MultiEnvironment context) throws Exception {
-		context.executeBenchmark(RECORDS_PER_INVOCATION);
-	}
+    @Benchmark
+    public void networkBroadcastThroughput(MultiEnvironment context) throws Exception {
+        context.executeBenchmark(RECORDS_PER_INVOCATION);
+    }
 
-	/**
-	 * Setup for the benchmark(s).
-	 */
-	@State(Thread)
-	public static class MultiEnvironment extends StreamNetworkBroadcastThroughputBenchmark {
+    /** Setup for the benchmark(s). */
+    @State(Thread)
+    public static class MultiEnvironment extends StreamNetworkBroadcastThroughputBenchmark {
 
-		@Setup
-		public void setUp() throws Exception {
-			super.setUp(4, 100, 100);
-		}
+        @Setup
+        public void setUp() throws Exception {
+            super.setUp(4, 100, 100);
+        }
 
-		@TearDown
-		public void tearDown() throws Exception {
-			super.tearDown();
-		}
-	}
+        @TearDown
+        public void tearDown() throws Exception {
+            super.tearDown();
+        }
+    }
 }
diff --git a/src/test/java/org/apache/flink/benchmark/StreamNetworkLatencyBenchmarkExecutor.java b/src/test/java/org/apache/flink/benchmark/StreamNetworkLatencyBenchmarkExecutor.java
index ef10079..9fbc5ad 100644
--- a/src/test/java/org/apache/flink/benchmark/StreamNetworkLatencyBenchmarkExecutor.java
+++ b/src/test/java/org/apache/flink/benchmark/StreamNetworkLatencyBenchmarkExecutor.java
@@ -36,43 +36,43 @@ import static java.util.concurrent.TimeUnit.MILLISECONDS;
 import static org.openjdk.jmh.annotations.Mode.AverageTime;
 import static org.openjdk.jmh.annotations.Scope.Thread;
 
-/**
- * JMH latency benchmark runner.
- */
+/** JMH latency benchmark runner. */
 @OutputTimeUnit(MILLISECONDS)
 @BenchmarkMode(AverageTime)
 public class StreamNetworkLatencyBenchmarkExecutor extends BenchmarkBase {
 
-	private static final int RECORDS_PER_INVOCATION = 100;
+    private static final int RECORDS_PER_INVOCATION = 100;
 
-	public static void main(String[] args)
-			throws RunnerException {
-		Options options = new OptionsBuilder()
-				.verbosity(VerboseMode.NORMAL)
-				.include(".*" + StreamNetworkLatencyBenchmarkExecutor.class.getCanonicalName() + ".*")
-				.build();
+    public static void main(String[] args) throws RunnerException {
+        Options options =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(
+                                ".*"
+                                        + StreamNetworkLatencyBenchmarkExecutor.class
+                                                .getCanonicalName()
+                                        + ".*")
+                        .build();
 
-		new Runner(options).run();
-	}
+        new Runner(options).run();
+    }
 
-	@Benchmark
-	public void networkLatency1to1(Environment context) throws Exception {
-		context.executeBenchmark(RECORDS_PER_INVOCATION, false);
-	}
+    @Benchmark
+    public void networkLatency1to1(Environment context) throws Exception {
+        context.executeBenchmark(RECORDS_PER_INVOCATION, false);
+    }
 
-	/**
-	 * Setup for the benchmark(s).
-	 */
-	@State(Thread)
-	public static class Environment extends StreamNetworkPointToPointBenchmark {
-		@Setup
-		public void setUp() throws Exception {
-			super.setUp(10);
-		}
+    /** Setup for the benchmark(s). */
+    @State(Thread)
+    public static class Environment extends StreamNetworkPointToPointBenchmark {
+        @Setup
+        public void setUp() throws Exception {
+            super.setUp(10);
+        }
 
-		@TearDown
-		public void tearDown() {
-			super.tearDown();
-		}
-	}
+        @TearDown
+        public void tearDown() {
+            super.tearDown();
+        }
+    }
 }
diff --git a/src/test/java/org/apache/flink/benchmark/StreamNetworkThroughputBenchmarkExecutor.java b/src/test/java/org/apache/flink/benchmark/StreamNetworkThroughputBenchmarkExecutor.java
index 74eec9e..66e748d 100644
--- a/src/test/java/org/apache/flink/benchmark/StreamNetworkThroughputBenchmarkExecutor.java
+++ b/src/test/java/org/apache/flink/benchmark/StreamNetworkThroughputBenchmarkExecutor.java
@@ -39,90 +39,97 @@ import java.util.Arrays;
 import static org.apache.flink.util.Preconditions.checkArgument;
 import static org.openjdk.jmh.annotations.Scope.Thread;
 
-/**
- * JMH throughput benchmark runner.
- */
+/** JMH throughput benchmark runner. */
 @OperationsPerInvocation(value = StreamNetworkThroughputBenchmarkExecutor.RECORDS_PER_INVOCATION)
 public class StreamNetworkThroughputBenchmarkExecutor extends BenchmarkBase {
 
-	static final int RECORDS_PER_INVOCATION = 5_000_000;
-
-	public static void main(String[] args)
-			throws RunnerException {
-		Options options = new OptionsBuilder()
-				.verbosity(VerboseMode.NORMAL)
-				.include(".*" + StreamNetworkThroughputBenchmarkExecutor.class.getCanonicalName() + ".*")
-				.build();
-
-		new Runner(options).run();
-	}
-
-	@Benchmark
-	public void networkThroughput(MultiEnvironment context) throws Exception {
-		context.executeBenchmark(RECORDS_PER_INVOCATION);
-	}
-
-	/**
-	 * Setup for the benchmark(s).
-	 */
-	@State(Thread)
-	public static class MultiEnvironment extends StreamNetworkThroughputBenchmark {
-
-		@Param({"100,100ms", "100,100ms,SSL", "1000,1ms", "1000,100ms", "1000,100ms,SSL", "1000,100ms,OpenSSL"})
-		public String channelsFlushTimeout = "100,100ms";
-
-		//Do not spam continuous benchmarking with number of writers parameter.
-		//@Param({"1", "4"})
-		public int writers = 4;
-
-		@Setup
-		public void setUp() throws Exception {
-			int channels = parseChannels(channelsFlushTimeout);
-			int flushTimeout = parseFlushTimeout(channelsFlushTimeout);
-			String sslProvider = parseEnableSSL(channelsFlushTimeout);
-
-			setUp(
-					writers,
-					channels,
-					flushTimeout,
-					false,
-					false,
-					-1,
-					-1,
-					sslProvider != null ? SSLUtilsTest.createInternalSslConfigWithKeyAndTrustStores(
-							sslProvider) : new Configuration()
-			);
-		}
-
-		private static String parseEnableSSL(String channelsFlushTimeout) {
-			String[] parameters = channelsFlushTimeout.split(",");
-			if (Arrays.asList(parameters).contains("SSL")) {
-				return "JDK";
-			} else if (Arrays.asList(parameters).contains("OpenSSL")) {
-				return "OPENSSL";
-			} else {
-				return null;
-			}
-		}
-
-		private static int parseFlushTimeout(String channelsFlushTimeout) {
-			String[] parameters = channelsFlushTimeout.split(",");
-			checkArgument(parameters.length >= 2);
-			String flushTimeout = parameters[1];
-
-			checkArgument(flushTimeout.endsWith("ms"));
-			return Integer.parseInt(flushTimeout.substring(0, flushTimeout.length() - 2));
-		}
-
-		private static int parseChannels(String channelsFlushTimeout) {
-			String[] parameters = channelsFlushTimeout.split(",");
-			checkArgument(parameters.length >= 1);
-			return Integer.parseInt(parameters[0]);
-		}
-
-		@TearDown
-		public void tearDown() throws Exception {
-			super.tearDown();
-		}
-	}
+    static final int RECORDS_PER_INVOCATION = 5_000_000;
+
+    public static void main(String[] args) throws RunnerException {
+        Options options =
+                new OptionsBuilder()
+                        .verbosity(VerboseMode.NORMAL)
+                        .include(
+                                ".*"
+                                        + StreamNetworkThroughputBenchmarkExecutor.class
+                                                .getCanonicalName()
+                                        + ".*")
+                        .build();
+
+        new Runner(options).run();
+    }
+
+    @Benchmark
+    public void networkThroughput(MultiEnvironment context) throws Exception {
+        context.executeBenchmark(RECORDS_PER_INVOCATION);
+    }
+
+    /** Setup for the benchmark(s). */
+    @State(Thread)
+    public static class MultiEnvironment extends StreamNetworkThroughputBenchmark {
+
+        @Param({
+            "100,100ms",
+            "100,100ms,SSL",
+            "1000,1ms",
+            "1000,100ms",
+            "1000,100ms,SSL",
+            "1000,100ms,OpenSSL"
+        })
+        public String channelsFlushTimeout = "100,100ms";
+
+        // Do not spam continuous benchmarking with number of writers parameter.
+        // @Param({"1", "4"})
+        public int writers = 4;
+
+        private static String parseEnableSSL(String channelsFlushTimeout) {
+            String[] parameters = channelsFlushTimeout.split(",");
+            if (Arrays.asList(parameters).contains("SSL")) {
+                return "JDK";
+            } else if (Arrays.asList(parameters).contains("OpenSSL")) {
+                return "OPENSSL";
+            } else {
+                return null;
+            }
+        }
+
+        private static int parseFlushTimeout(String channelsFlushTimeout) {
+            String[] parameters = channelsFlushTimeout.split(",");
+            checkArgument(parameters.length >= 2);
+            String flushTimeout = parameters[1];
+
+            checkArgument(flushTimeout.endsWith("ms"));
+            return Integer.parseInt(flushTimeout.substring(0, flushTimeout.length() - 2));
+        }
+
+        private static int parseChannels(String channelsFlushTimeout) {
+            String[] parameters = channelsFlushTimeout.split(",");
+            checkArgument(parameters.length >= 1);
+            return Integer.parseInt(parameters[0]);
+        }
+
+        @Setup
+        public void setUp() throws Exception {
+            int channels = parseChannels(channelsFlushTimeout);
+            int flushTimeout = parseFlushTimeout(channelsFlushTimeout);
+            String sslProvider = parseEnableSSL(channelsFlushTimeout);
+
+            setUp(
+                    writers,
+                    channels,
+                    flushTimeout,
+                    false,
+                    false,
+                    -1,
+                    -1,
+                    sslProvider != null
+                            ? SSLUtilsTest.createInternalSslConfigWithKeyAndTrustStores(sslProvider)
+                            : new Configuration());
+        }
+
+        @TearDown
+        public void tearDown() throws Exception {
+            super.tearDown();
+        }
+    }
 }
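
The @Param string above packs three settings into one value of the form "channels,flushTimeout[,SSL|OpenSSL]", which the three private parsers split back apart during setUp. A standalone sketch of the same parsing, runnable as-is (the input value is hypothetical):

    import java.util.Arrays;
    import java.util.List;

    public class ChannelsFlushTimeoutParam {
        public static void main(String[] args) {
            // Hypothetical input in the same format as the @Param values above.
            String param = "1000,100ms,OpenSSL";
            List<String> parts = Arrays.asList(param.split(","));

            int channels = Integer.parseInt(parts.get(0));
            String flush = parts.get(1);
            if (!flush.endsWith("ms")) {
                throw new IllegalArgumentException("flush timeout must end with 'ms': " + flush);
            }
            int flushTimeoutMs = Integer.parseInt(flush.substring(0, flush.length() - 2));

            String sslProvider = null; // null means plaintext, as in setUp above
            if (parts.contains("SSL")) {
                sslProvider = "JDK";
            } else if (parts.contains("OpenSSL")) {
                sslProvider = "OPENSSL";
            }

            System.out.println(
                    channels + " channels, " + flushTimeoutMs + "ms flush, ssl=" + sslProvider);
        }
    }
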