Posted to commits@flink.apache.org by ch...@apache.org on 2017/05/25 11:04:10 UTC

[1/7] flink git commit: [FLINK-6707] [examples] Activate strict checkstyle for flink-examples

Repository: flink
Updated Branches:
  refs/heads/master d481f2950 -> 0e69dd5cc


http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/join/WindowJoin.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/join/WindowJoin.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/join/WindowJoin.java
index 42101a5..01effd1 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/join/WindowJoin.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/join/WindowJoin.java
@@ -27,13 +27,12 @@ import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
 import org.apache.flink.streaming.api.windowing.time.Time;
-
 import org.apache.flink.streaming.examples.join.WindowJoinSampleData.GradeSource;
 import org.apache.flink.streaming.examples.join.WindowJoinSampleData.SalarySource;
 
 /**
  * Example illustrating a windowed stream join between two data streams.
- * 
+ *
  * <p>The example works on two input streams with pairs (name, grade) and (name, salary)
  * respectively. It joins the steams based on "name" within a configurable window.
  *
@@ -52,7 +51,7 @@ public class WindowJoin {
 		final ParameterTool params = ParameterTool.fromArgs(args);
 		final long windowSize = params.getLong("windowSize", 2000);
 		final long rate = params.getLong("rate", 3L);
-		
+
 		System.out.println("Using windowSize=" + windowSize + ", data rate=" + rate);
 		System.out.println("To customize example, use: WindowJoin [--windowSize <window-size-in-millis>] [--rate <elements-per-second>]");
 
@@ -66,7 +65,7 @@ public class WindowJoin {
 		// create the data sources for both grades and salaries
 		DataStream<Tuple2<String, Integer>> grades = GradeSource.getSource(env, rate);
 		DataStream<Tuple2<String, Integer>> salaries = SalarySource.getSource(env, rate);
-		
+
 		// run the actual window join program
 		// for testability, this functionality is in a separate method.
 		DataStream<Tuple3<String, Integer, Integer>> joinedStream = runWindowJoin(grades, salaries, windowSize);
@@ -77,7 +76,7 @@ public class WindowJoin {
 		// execute program
 		env.execute("Windowed Join Example");
 	}
-	
+
 	public static DataStream<Tuple3<String, Integer, Integer>> runWindowJoin(
 			DataStream<Tuple2<String, Integer>> grades,
 			DataStream<Tuple2<String, Integer>> salaries,
@@ -86,11 +85,11 @@ public class WindowJoin {
 		return grades.join(salaries)
 				.where(new NameKeySelector())
 				.equalTo(new NameKeySelector())
-				
+
 				.window(TumblingEventTimeWindows.of(Time.milliseconds(windowSize)))
-				
+
 				.apply(new JoinFunction<Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple3<String, Integer, Integer>>() {
-					
+
 					@Override
 					public Tuple3<String, Integer, Integer> join(
 									Tuple2<String, Integer> first,
@@ -99,7 +98,7 @@ public class WindowJoin {
 					}
 				});
 	}
-	
+
 	private static class NameKeySelector implements KeySelector<Tuple2<String, Integer>, String> {
 		@Override
 		public String getKey(Tuple2<String, Integer> value) {

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/kafka/ReadFromKafka.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/kafka/ReadFromKafka.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/kafka/ReadFromKafka.java
index 1e48739..f9cf42b 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/kafka/ReadFromKafka.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/kafka/ReadFromKafka.java
@@ -29,9 +29,8 @@ import org.apache.flink.streaming.util.serialization.SimpleStringSchema;
  * Read Strings from Kafka and print them to standard out.
  * Note: On a cluster, DataStream.print() will print to the TaskManager's .out file!
  *
- * Please pass the following arguments to run the example:
+ * <p>Please pass the following arguments to run the example:
  * 	--topic test --bootstrap.servers localhost:9092 --zookeeper.connect localhost:2181 --group.id myconsumer
- *
  */
 public class ReadFromKafka {
 
@@ -39,7 +38,7 @@ public class ReadFromKafka {
 		// parse input arguments
 		final ParameterTool parameterTool = ParameterTool.fromArgs(args);
 
-		if(parameterTool.getNumberOfParameters() < 4) {
+		if (parameterTool.getNumberOfParameters() < 4) {
 			System.out.println("Missing parameters!\nUsage: Kafka --topic <topic> " +
 					"--bootstrap.servers <kafka brokers> --zookeeper.connect <zk quorum> --group.id <some id>");
 			return;

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/kafka/WriteIntoKafka.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/kafka/WriteIntoKafka.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/kafka/WriteIntoKafka.java
index 37c86a3..f9b4656 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/kafka/WriteIntoKafka.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/kafka/WriteIntoKafka.java
@@ -25,25 +25,23 @@ import org.apache.flink.streaming.api.functions.source.SourceFunction;
 import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer08;
 import org.apache.flink.streaming.util.serialization.SimpleStringSchema;
 
-
 /**
- * Generate a String every 500 ms and write it into a Kafka topic
+ * Generate a String every 500 ms and write it into a Kafka topic.
  *
- * Please pass the following arguments to run the example:
+ * <p>Please pass the following arguments to run the example:
  * 	--topic test --bootstrap.servers localhost:9092
- *
  */
 public class WriteIntoKafka {
 
 	public static void main(String[] args) throws Exception {
 		ParameterTool parameterTool = ParameterTool.fromArgs(args);
-		if(parameterTool.getNumberOfParameters() < 2) {
+		if (parameterTool.getNumberOfParameters() < 2) {
 			System.out.println("Missing parameters!");
 			System.out.println("Usage: Kafka --topic <topic> --bootstrap.servers <kafka brokers>");
 			return;
 		}
 
-		StreamExecutionEnvironment env =StreamExecutionEnvironment.getExecutionEnvironment();
+		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
 		env.getConfig().disableSysoutLogging();
 		env.getConfig().setRestartStrategy(RestartStrategies.fixedDelayRestart(4, 10000));
 
@@ -55,7 +53,7 @@ public class WriteIntoKafka {
 			@Override
 			public void run(SourceContext<String> ctx) throws Exception {
 				long i = 0;
-				while(this.running) {
+				while (this.running) {
 					ctx.collect("Element - " + i++);
 					Thread.sleep(500);
 				}

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/ml/IncrementalLearningSkeleton.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/ml/IncrementalLearningSkeleton.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/ml/IncrementalLearningSkeleton.java
index f10c55e..0063669 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/ml/IncrementalLearningSkeleton.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/ml/IncrementalLearningSkeleton.java
@@ -37,13 +37,11 @@ import java.util.concurrent.TimeUnit;
  * pre-computed model, which gets updated for the new inputs and new input data
  * for which the job provides predictions.
  *
- * <p>
- * This may serve as a base of a number of algorithms, e.g. updating an
+ * <p>This may serve as a base of a number of algorithms, e.g. updating an
  * incremental Alternating Least Squares model while also providing the
  * predictions.
  *
- * <p>
- * This example shows how to use:
+ * <p>This example shows how to use:
  * <ul>
  *   <li>Connected streams
  *   <li>CoFunctions
@@ -147,7 +145,7 @@ public class IncrementalLearningSkeleton {
 		}
 	}
 
-	public static class LinearTimestamp implements AssignerWithPunctuatedWatermarks<Integer> {
+	private static class LinearTimestamp implements AssignerWithPunctuatedWatermarks<Integer> {
 		private static final long serialVersionUID = 1L;
 
 		private long counter = 0L;

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/ml/util/IncrementalLearningSkeletonData.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/ml/util/IncrementalLearningSkeletonData.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/ml/util/IncrementalLearningSkeletonData.java
index 144af99..191dbc1 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/ml/util/IncrementalLearningSkeletonData.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/ml/util/IncrementalLearningSkeletonData.java
@@ -17,6 +17,9 @@
 
 package org.apache.flink.streaming.examples.ml.util;
 
+/**
+ * Data for IncrementalLearningSkeletonITCase.
+ */
 public class IncrementalLearningSkeletonData {
 
 	public static final String RESULTS = "1\n" + "1\n" + "1\n" + "1\n" + "1\n" + "1\n" + "1\n" +

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/sideoutput/SideOutputExample.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/sideoutput/SideOutputExample.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/sideoutput/SideOutputExample.java
index bfd3cd7..a506624 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/sideoutput/SideOutputExample.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/sideoutput/SideOutputExample.java
@@ -19,18 +19,18 @@ package org.apache.flink.streaming.examples.sideoutput;
 
 import org.apache.flink.api.common.functions.MapFunction;
 import org.apache.flink.api.java.functions.KeySelector;
-import org.apache.flink.streaming.api.TimeCharacteristic;
-import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
-import org.apache.flink.streaming.api.functions.ProcessFunction;
-import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
-import org.apache.flink.streaming.api.windowing.time.Time;
-import org.apache.flink.util.OutputTag;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.api.java.utils.ParameterTool;
 import org.apache.flink.examples.java.wordcount.util.WordCountData;
+import org.apache.flink.streaming.api.TimeCharacteristic;
 import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.streaming.api.functions.ProcessFunction;
+import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
+import org.apache.flink.streaming.api.windowing.time.Time;
 import org.apache.flink.util.Collector;
+import org.apache.flink.util.OutputTag;
 
 /**
  * An example that illustrates the use of side outputs.
@@ -45,7 +45,7 @@ public class SideOutputExample {
 	 * We need to create an {@link OutputTag} so that we can reference it when emitting
 	 * data to a side output and also to retrieve the side output stream from an operation.
 	 */
-	static final OutputTag<String> rejectedWordsTag = new OutputTag<String>("rejected") {};
+	private static final OutputTag<String> rejectedWordsTag = new OutputTag<String>("rejected") {};
 
 	public static void main(String[] args) throws Exception {
 
@@ -75,7 +75,7 @@ public class SideOutputExample {
 		SingleOutputStreamOperator<Tuple2<String, Integer>> tokenized = text
 				.keyBy(new KeySelector<String, Integer>() {
 					private static final long serialVersionUID = 1L;
-					
+
 					@Override
 					public Integer getKey(String value) throws Exception {
 						return 0;

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/socket/SocketWindowWordCount.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/socket/SocketWindowWordCount.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/socket/SocketWindowWordCount.java
index 250c5b9..646a74a 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/socket/SocketWindowWordCount.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/socket/SocketWindowWordCount.java
@@ -29,7 +29,7 @@ import org.apache.flink.util.Collector;
 /**
  * Implements a streaming windowed version of the "WordCount" program.
  *
- * This program connects to a server socket and reads strings from the socket.
+ * <p>This program connects to a server socket and reads strings from the socket.
  * The easiest way to try this out is to open a text server (at port 12345)
  * using the <i>netcat</i> tool via
  * <pre>
@@ -95,7 +95,7 @@ public class SocketWindowWordCount {
 	// ------------------------------------------------------------------------
 
 	/**
-	 * Data type for words with count
+	 * Data type for words with count.
 	 */
 	public static class WordWithCount {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/twitter/TwitterExample.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/twitter/TwitterExample.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/twitter/TwitterExample.java
index 6d1bce5..73015f0 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/twitter/TwitterExample.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/twitter/TwitterExample.java
@@ -17,8 +17,6 @@
 
 package org.apache.flink.streaming.examples.twitter;
 
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.flink.api.common.functions.FlatMapFunction;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.api.java.utils.ParameterTool;
@@ -28,23 +26,24 @@ import org.apache.flink.streaming.connectors.twitter.TwitterSource;
 import org.apache.flink.streaming.examples.twitter.util.TwitterExampleData;
 import org.apache.flink.util.Collector;
 
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
 import java.util.StringTokenizer;
 
 /**
  * Implements the "TwitterStream" program that computes a most used word
  * occurrence over JSON objects in a streaming fashion.
- * <p>
- * The input is a Tweet stream from a TwitterSource.
- * </p>
- * <p>
- * Usage: <code>Usage: TwitterExample [--output <path>]
- * [--twitter-source.consumerKey <key> --twitter-source.consumerSecret <secret> --twitter-source.token <token> --twitter-source.tokenSecret <tokenSecret>]</code><br>
  *
- * If no parameters are provided, the program is run with default data from
+ * <p>The input is a Tweet stream from a TwitterSource.
+ *
+ * <p>Usage: <code>Usage: TwitterExample [--output &lt;path&gt;]
+ * [--twitter-source.consumerKey &lt;key&gt; --twitter-source.consumerSecret &lt;secret&gt; --twitter-source.token &lt;token&gt; --twitter-source.tokenSecret &lt;tokenSecret&gt;]</code><br>
+ *
+ * <p>If no parameters are provided, the program is run with default data from
  * {@link TwitterExampleData}.
- * </p>
- * <p>
- * This example shows how to:
+ *
+ * <p>This example shows how to:
  * <ul>
  * <li>acquire external data,
  * <li>use in-line defined functions,
@@ -113,8 +112,7 @@ public class TwitterExample {
 	/**
 	 * Deserialize JSON from twitter source
 	 *
-	 * <p>
-	 * Implements a string tokenizer that splits sentences into words as a
+	 * <p>Implements a string tokenizer that splits sentences into words as a
 	 * user-defined FlatMapFunction. The function takes a line (String) and
 	 * splits it into multiple pairs in the form of "(word,1)" ({@code Tuple2<String,
 	 * Integer>}).
@@ -123,12 +121,13 @@ public class TwitterExample {
 		private static final long serialVersionUID = 1L;
 
 		private transient ObjectMapper jsonParser;
+
 		/**
-		 * Select the language from the incoming JSON text
+		 * Select the language from the incoming JSON text.
 		 */
 		@Override
 		public void flatMap(String value, Collector<Tuple2<String, Integer>> out) throws Exception {
-			if(jsonParser == null) {
+			if (jsonParser == null) {
 				jsonParser = new ObjectMapper();
 			}
 			JsonNode jsonNode = jsonParser.readValue(value, JsonNode.class);

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/twitter/util/TwitterExampleData.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/twitter/util/TwitterExampleData.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/twitter/util/TwitterExampleData.java
index 91b292f..c6454fb 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/twitter/util/TwitterExampleData.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/twitter/util/TwitterExampleData.java
@@ -17,7 +17,9 @@
 
 package org.apache.flink.streaming.examples.twitter.util;
 
-//example data looking like tweets, but not acquired from Twitter
+/**
+ * Example data looking like tweets, but not acquired from Twitter.
+ */
 public class TwitterExampleData {
 	public static final String[] TEXTS = new String[] {
 			"{\"created_at\":\"Mon Jan 1 00:00:00 +0000 1901\",\"id\":0,\"id_str\":\"000000000000000000\",\"text\":\"Apache Flink\",\"source\":null,\"truncated\":false,\"in_reply_to_status_id\":null,\"in_reply_to_status_id_str\":null,\"in_reply_to_user_id\":null,\"in_reply_to_user_id_str\":null,\"in_reply_to_screen_name\":null,\"user\":{\"id\":0,\"id_str\":\"0000000000\",\"name\":\"Apache Flink\",\"screen_name\":\"Apache Flink\",\"location\":\"Berlin\",\"protected\":false,\"verified\":false,\"followers_count\":999999,\"friends_count\":99999,\"listed_count\":999,\"favourites_count\":9999,\"statuses_count\":999,\"created_at\":\"Mon Jan 1 00:00:00 +0000 1901\",\"utc_offset\":7200,\"time_zone\":\"Amsterdam\",\"geo_enabled\":false,\"lang\":\"en\",\"entities\":{\"hashtags\":[{\"text\":\"example1\",\"indices\":[0,0]},{\"text\":\"tweet1\",\"indices\":[0,0]}]},\"contributors_enabled\":false,\"is_translator\":false,\"profile_background_color\":\"C6E2EE\",\"profile_background_tile\":false,\"profile_lin
 k_color\":\"1F98C7\",\"profile_sidebar_border_color\":\"FFFFFF\",\"profile_sidebar_fill_color\":\"252429\",\"profile_text_color\":\"666666\",\"profile_use_background_image\":true,\"default_profile\":false,\"default_profile_image\":false,\"following\":null,\"follow_request_sent\":null,\"notifications\":null},\"geo\":null,\"coordinates\":null,\"place\":null,\"contributors\":null}",

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/utils/ThrottledIterator.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/utils/ThrottledIterator.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/utils/ThrottledIterator.java
index ba7feea..7fd53c3 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/utils/ThrottledIterator.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/utils/ThrottledIterator.java
@@ -74,9 +74,9 @@ public class ThrottledIterator<T> implements Iterator<T>, Serializable {
 		if (lastBatchCheckTime > 0) {
 			if (++num >= sleepBatchSize) {
 				num = 0;
-	
+
 				final long now = System.currentTimeMillis();
-				final long elapsed = now - lastBatchCheckTime; 
+				final long elapsed = now - lastBatchCheckTime;
 				if (elapsed < sleepBatchTime) {
 					try {
 						Thread.sleep(sleepBatchTime - elapsed);

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/GroupedProcessingTimeWindowExample.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/GroupedProcessingTimeWindowExample.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/GroupedProcessingTimeWindowExample.java
index f08069b..bedc130 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/GroupedProcessingTimeWindowExample.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/GroupedProcessingTimeWindowExample.java
@@ -33,41 +33,43 @@ import org.apache.flink.util.Collector;
 
 import static java.util.concurrent.TimeUnit.MILLISECONDS;
 
+/**
+ * Example of grouped processing time windows.
+ */
 @SuppressWarnings("serial")
 public class GroupedProcessingTimeWindowExample {
-	
+
 	public static void main(String[] args) throws Exception {
-		
+
 		final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
 		env.setParallelism(4);
-		
+
 		DataStream<Tuple2<Long, Long>> stream = env
 				.addSource(new RichParallelSourceFunction<Tuple2<Long, Long>>() {
-					
+
 					private volatile boolean running = true;
-					
+
 					@Override
 					public void run(SourceContext<Tuple2<Long, Long>> ctx) throws Exception {
-						
+
 						final long startTime = System.currentTimeMillis();
-						
+
 						final long numElements = 20000000;
 						final long numKeys = 10000;
 						long val = 1L;
 						long count = 0L;
-						
-						
+
 						while (running && count < numElements) {
 							count++;
 							ctx.collect(new Tuple2<>(val++, 1L));
-							
+
 							if (val > numKeys) {
 								val = 1L;
 							}
 						}
 
 						final long endTime = System.currentTimeMillis();
-						System.out.println("Took " + (endTime-startTime) + " msecs for " + numElements + " values");
+						System.out.println("Took " + (endTime - startTime) + " msecs for " + numElements + " values");
 					}
 
 					@Override
@@ -75,7 +77,7 @@ public class GroupedProcessingTimeWindowExample {
 						running = false;
 					}
 				});
-		
+
 		stream
 			.keyBy(0)
 			.timeWindow(Time.of(2500, MILLISECONDS), Time.of(500, MILLISECONDS))
@@ -85,18 +87,18 @@ public class GroupedProcessingTimeWindowExample {
 //			.keyBy(new FirstFieldKeyExtractor<Tuple2<Long, Long>, Long>())
 //			.window(Time.of(2500, MILLISECONDS), Time.of(500, MILLISECONDS))
 //			.apply(new SummingWindowFunction())
-				
+
 			.addSink(new SinkFunction<Tuple2<Long, Long>>() {
 				@Override
 				public void invoke(Tuple2<Long, Long> value) {
 				}
 			});
-		
+
 		env.execute();
 	}
-	
-	public static class FirstFieldKeyExtractor<Type extends Tuple, Key> implements KeySelector<Type, Key> {
-		
+
+	private static class FirstFieldKeyExtractor<Type extends Tuple, Key> implements KeySelector<Type, Key> {
+
 		@Override
 		@SuppressWarnings("unchecked")
 		public Key getKey(Type value) {
@@ -104,7 +106,7 @@ public class GroupedProcessingTimeWindowExample {
 		}
 	}
 
-	public static class SummingWindowFunction implements WindowFunction<Tuple2<Long, Long>, Tuple2<Long, Long>, Long, Window> {
+	private static class SummingWindowFunction implements WindowFunction<Tuple2<Long, Long>, Tuple2<Long, Long>, Long, Window> {
 
 		@Override
 		public void apply(Long key, Window window, Iterable<Tuple2<Long, Long>> values, Collector<Tuple2<Long, Long>> out) {
@@ -117,7 +119,7 @@ public class GroupedProcessingTimeWindowExample {
 		}
 	}
 
-	public static class SummingReducer implements ReduceFunction<Tuple2<Long, Long>> {
+	private static class SummingReducer implements ReduceFunction<Tuple2<Long, Long>> {
 
 		@Override
 		public Tuple2<Long, Long> reduce(Tuple2<Long, Long> value1, Tuple2<Long, Long> value2) {

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/SessionWindowing.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/SessionWindowing.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/SessionWindowing.java
index 61616fe..e4f4522 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/SessionWindowing.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/SessionWindowing.java
@@ -30,6 +30,10 @@ import org.apache.flink.streaming.api.windowing.time.Time;
 import java.util.ArrayList;
 import java.util.List;
 
+/**
+ * An example of session windowing where events are keyed by ID and grouped
+ * and counted within session windows with a timeout of 3 time units.
+ */
 public class SessionWindowing {
 
 	@SuppressWarnings("serial")
@@ -58,7 +62,7 @@ public class SessionWindowing {
 		input.add(new Tuple3<>("c", 11L, 1));
 
 		DataStream<Tuple3<String, Long, Integer>> source = env
-				.addSource(new SourceFunction<Tuple3<String,Long,Integer>>() {
+				.addSource(new SourceFunction<Tuple3<String, Long, Integer>>() {
 					private static final long serialVersionUID = 1L;
 
 					@Override

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/TopSpeedWindowing.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/TopSpeedWindowing.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/TopSpeedWindowing.java
index d159aab..ee06cd4 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/TopSpeedWindowing.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/TopSpeedWindowing.java
@@ -23,8 +23,8 @@ import org.apache.flink.api.java.utils.ParameterTool;
 import org.apache.flink.streaming.api.TimeCharacteristic;
 import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;
 import org.apache.flink.streaming.api.functions.source.SourceFunction;
+import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;
 import org.apache.flink.streaming.api.functions.windowing.delta.DeltaFunction;
 import org.apache.flink.streaming.api.windowing.assigners.GlobalWindows;
 import org.apache.flink.streaming.api.windowing.evictors.TimeEvictor;

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/WindowWordCount.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/WindowWordCount.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/WindowWordCount.java
index 38fcd76..ab64575 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/WindowWordCount.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/WindowWordCount.java
@@ -27,22 +27,18 @@ import org.apache.flink.streaming.examples.wordcount.WordCount;
 /**
  * Implements a windowed version of the streaming "WordCount" program.
  *
- * <p>
- * The input is a plain text file with lines separated by newline characters.
- * 
- * <p>
- * Usage: <code>WordCount --input &lt;path&gt; --output &lt;path&gt; --window &lt;n&gt; --slide &lt;n&gt;</code><br>
+ * <p>The input is a plain text file with lines separated by newline characters.
+ *
+ * <p>Usage: <code>WordCount --input &lt;path&gt; --output &lt;path&gt; --window &lt;n&gt; --slide &lt;n&gt;</code><br>
  * If no parameters are provided, the program is run with default data from
  * {@link org.apache.flink.examples.java.wordcount.util.WordCountData}.
  *
- * <p>
- * This example shows how to:
+ * <p>This example shows how to:
  * <ul>
  * <li>write a simple Flink Streaming program,
  * <li>use tuple data types,
  * <li>use basic windowing abstractions.
  * </ul>
- *
  */
 public class WindowWordCount {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/util/SessionWindowingData.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/util/SessionWindowingData.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/util/SessionWindowingData.java
index c1a99a8..08c8ce1 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/util/SessionWindowingData.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/util/SessionWindowingData.java
@@ -17,6 +17,9 @@
 
 package org.apache.flink.streaming.examples.windowing.util;
 
+/**
+ * Data for SessionWindowingITCase.
+ */
 public class SessionWindowingData {
 
 	public static final String EXPECTED = "(a,1,1)\n" + "(c,6,1)\n" + "(c,11,1)\n" + "(b,1,3)\n" +

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/util/TopSpeedWindowingExampleData.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/util/TopSpeedWindowingExampleData.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/util/TopSpeedWindowingExampleData.java
index 4718b8b..e53d86c 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/util/TopSpeedWindowingExampleData.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/windowing/util/TopSpeedWindowingExampleData.java
@@ -17,6 +17,9 @@
 
 package org.apache.flink.streaming.examples.windowing.util;
 
+/**
+ * Data for TopSpeedWindowingExampleITCase.
+ */
 public class TopSpeedWindowingExampleData {
 
 	public static final String CAR_DATA =

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/wordcount/PojoExample.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/wordcount/PojoExample.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/wordcount/PojoExample.java
index 8c16172..67f75fd 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/wordcount/PojoExample.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/wordcount/PojoExample.java
@@ -28,22 +28,20 @@ import org.apache.flink.util.Collector;
 /**
  * This example shows an implementation of WordCount without using the Tuple2
  * type, but a custom class.
- * 
- * <p>
- * Usage: <code>WordCount --input &lt;path&gt; --output &lt;path&gt;</code><br>
+ *
+ * <p>Usage: <code>WordCount --input &lt;path&gt; --output &lt;path&gt;</code><br>
  * If no parameters are provided, the program is run with default data from
  * {@link WordCountData}.
- * 
- * <p>
- * This example shows how to:
+ *
+ * <p>This example shows how to:
  * <ul>
  * <li>use POJO data types,
  * <li>write a simple Flink program,
- * <li>write and use user-defined functions. 
+ * <li>write and use user-defined functions.
  * </ul>
  */
 public class PojoExample {
-	
+
 	// *************************************************************************
 	// PROGRAM
 	// *************************************************************************

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/wordcount/WordCount.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/wordcount/WordCount.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/wordcount/WordCount.java
index 4290878..b839bd7 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/wordcount/WordCount.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/wordcount/WordCount.java
@@ -28,23 +28,19 @@ import org.apache.flink.util.Collector;
 /**
  * Implements the "WordCount" program that computes a simple word occurrence
  * histogram over text files in a streaming fashion.
- * 
- * <p>
- * The input is a plain text file with lines separated by newline characters.
- * 
- * <p>
- * Usage: <code>WordCount --input &lt;path&gt; --output &lt;path&gt;</code><br>
+ *
+ * <p>The input is a plain text file with lines separated by newline characters.
+ *
+ * <p>Usage: <code>WordCount --input &lt;path&gt; --output &lt;path&gt;</code><br>
  * If no parameters are provided, the program is run with default data from
  * {@link WordCountData}.
- * 
- * <p>
- * This example shows how to:
+ *
+ * <p>This example shows how to:
  * <ul>
  * <li>write a simple Flink Streaming program,
  * <li>use tuple data types,
  * <li>write and use user-defined functions.
  * </ul>
- * 
  */
 public class WordCount {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/scala/org/apache/flink/streaming/scala/examples/join/WindowJoin.scala
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/scala/org/apache/flink/streaming/scala/examples/join/WindowJoin.scala b/flink-examples/flink-examples-streaming/src/main/scala/org/apache/flink/streaming/scala/examples/join/WindowJoin.scala
index 5bf7548..73be261 100644
--- a/flink-examples/flink-examples-streaming/src/main/scala/org/apache/flink/streaming/scala/examples/join/WindowJoin.scala
+++ b/flink-examples/flink-examples-streaming/src/main/scala/org/apache/flink/streaming/scala/examples/join/WindowJoin.scala
@@ -18,9 +18,9 @@
 
 package org.apache.flink.streaming.scala.examples.join
 
-import org.apache.flink.streaming.api.scala._
 import org.apache.flink.api.java.utils.ParameterTool
 import org.apache.flink.streaming.api.TimeCharacteristic
+import org.apache.flink.streaming.api.scala._
 import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows
 import org.apache.flink.streaming.api.windowing.time.Time
 

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/iteration/IterateExampleITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/iteration/IterateExampleITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/iteration/IterateExampleITCase.java
index 81eaa2f..2f9af69 100644
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/iteration/IterateExampleITCase.java
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/iteration/IterateExampleITCase.java
@@ -21,9 +21,11 @@ import org.apache.flink.streaming.examples.iteration.IterateExample;
 import org.apache.flink.streaming.examples.iteration.util.IterateExampleData;
 import org.apache.flink.streaming.util.StreamingProgramTestBase;
 
+/**
+ * Tests for {@link IterateExample}.
+ */
 public class IterateExampleITCase extends StreamingProgramTestBase {
 
-
 	protected String inputPath;
 	protected String resultPath;
 

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/join/WindowJoinData.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/join/WindowJoinData.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/join/WindowJoinData.java
index cca6ada..6b4738a 100644
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/join/WindowJoinData.java
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/join/WindowJoinData.java
@@ -59,6 +59,6 @@ public class WindowJoinData {
 			"18,jerry,2138\n" + "18,alice,7503\n" + "18,alice,6424\n" + "18,tom,140\n" + "18,john,9802\n" +
 			"19,grace,2977\n" + "19,grace,889\n" + "19,john,1338";
 
-	/** Utility class, should not be instantiated */
+	/** Utility class, should not be instantiated. */
 	private WindowJoinData() {}
 }

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/join/WindowJoinITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/join/WindowJoinITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/join/WindowJoinITCase.java
index 736438f..264ce55 100644
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/join/WindowJoinITCase.java
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/join/WindowJoinITCase.java
@@ -18,7 +18,6 @@
 
 package org.apache.flink.streaming.test.exampleJavaPrograms.join;
 
-import org.apache.commons.io.FileUtils;
 import org.apache.flink.api.common.functions.MapFunction;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.core.fs.FileSystem.WriteMode;
@@ -28,10 +27,14 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.flink.streaming.examples.join.WindowJoin;
 import org.apache.flink.streaming.util.StreamingMultipleProgramsTestBase;
 
+import org.apache.commons.io.FileUtils;
 import org.junit.Test;
 
 import java.io.File;
 
+/**
+ * Tests for {@link WindowJoin}.
+ */
 @SuppressWarnings("serial")
 public class WindowJoinITCase extends StreamingMultipleProgramsTestBase {
 
@@ -41,19 +44,19 @@ public class WindowJoinITCase extends StreamingMultipleProgramsTestBase {
 		try {
 			final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
 			env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);
-			
+
 			DataStream<Tuple2<String, Integer>> grades = env
 					.fromElements(WindowJoinData.GRADES_INPUT.split("\n"))
 					.map(new Parser());
-	
+
 			DataStream<Tuple2<String, Integer>> salaries = env
 					.fromElements(WindowJoinData.SALARIES_INPUT.split("\n"))
 					.map(new Parser());
-			
+
 			WindowJoin
 					.runWindowJoin(grades, salaries, 100)
 					.writeAsText(resultPath, WriteMode.OVERWRITE);
-			
+
 			env.execute();
 
 			// since the two sides of the join might have different speed
@@ -67,10 +70,10 @@ public class WindowJoinITCase extends StreamingMultipleProgramsTestBase {
 			} catch (Throwable ignored) {}
 		}
 	}
-	
+
 	//-------------------------------------------------------------------------
-	
-	public static final class Parser implements MapFunction<String, Tuple2<String, Integer>> {
+
+	private static final class Parser implements MapFunction<String, Tuple2<String, Integer>> {
 
 		@Override
 		public Tuple2<String, Integer> map(String value) throws Exception {

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/ml/IncrementalLearningSkeletonITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/ml/IncrementalLearningSkeletonITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/ml/IncrementalLearningSkeletonITCase.java
index 7097a10..90f6845 100644
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/ml/IncrementalLearningSkeletonITCase.java
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/ml/IncrementalLearningSkeletonITCase.java
@@ -21,6 +21,9 @@ import org.apache.flink.streaming.examples.ml.IncrementalLearningSkeleton;
 import org.apache.flink.streaming.examples.ml.util.IncrementalLearningSkeletonData;
 import org.apache.flink.streaming.util.StreamingProgramTestBase;
 
+/**
+ * Tests for {@link IncrementalLearningSkeleton}.
+ */
 public class IncrementalLearningSkeletonITCase extends StreamingProgramTestBase {
 
 	protected String resultPath;

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/twitter/TwitterStreamITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/twitter/TwitterStreamITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/twitter/TwitterStreamITCase.java
index 7d764f3..2be83c6 100644
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/twitter/TwitterStreamITCase.java
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/twitter/TwitterStreamITCase.java
@@ -21,6 +21,9 @@ import org.apache.flink.streaming.examples.twitter.TwitterExample;
 import org.apache.flink.streaming.examples.twitter.util.TwitterExampleData;
 import org.apache.flink.streaming.util.StreamingProgramTestBase;
 
+/**
+ * Tests for {@link TwitterExample}.
+ */
 public class TwitterStreamITCase extends StreamingProgramTestBase {
 	protected String resultPath;
 

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/SessionWindowingITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/SessionWindowingITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/SessionWindowingITCase.java
index 28255dc..a3c3175 100644
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/SessionWindowingITCase.java
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/SessionWindowingITCase.java
@@ -21,6 +21,9 @@ import org.apache.flink.streaming.examples.windowing.SessionWindowing;
 import org.apache.flink.streaming.examples.windowing.util.SessionWindowingData;
 import org.apache.flink.streaming.util.StreamingProgramTestBase;
 
+/**
+ * Tests for {@link SessionWindowing}.
+ */
 public class SessionWindowingITCase extends StreamingProgramTestBase {
 
 	protected String resultPath;

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/TopSpeedWindowingExampleITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/TopSpeedWindowingExampleITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/TopSpeedWindowingExampleITCase.java
index f15ab13..e9f9f51 100644
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/TopSpeedWindowingExampleITCase.java
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/TopSpeedWindowingExampleITCase.java
@@ -21,8 +21,11 @@ import org.apache.flink.streaming.examples.windowing.TopSpeedWindowing;
 import org.apache.flink.streaming.examples.windowing.util.TopSpeedWindowingExampleData;
 import org.apache.flink.streaming.util.StreamingProgramTestBase;
 
+/**
+ * Tests for {@link TopSpeedWindowing}.
+ */
 public class TopSpeedWindowingExampleITCase extends StreamingProgramTestBase {
-	
+
 	protected String textPath;
 	protected String resultPath;
 

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/WindowWordCountITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/WindowWordCountITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/WindowWordCountITCase.java
index ef5cf1c..ad36582 100644
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/WindowWordCountITCase.java
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/WindowWordCountITCase.java
@@ -22,6 +22,9 @@ import org.apache.flink.streaming.examples.windowing.WindowWordCount;
 import org.apache.flink.streaming.util.StreamingProgramTestBase;
 import org.apache.flink.test.testdata.WordCountData;
 
+/**
+ * Tests for {@link WindowWordCount}.
+ */
 public class WindowWordCountITCase extends StreamingProgramTestBase {
 
 	protected String textPath;

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/wordcount/PojoExampleITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/wordcount/PojoExampleITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/wordcount/PojoExampleITCase.java
index 3c69670..609b69d 100644
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/wordcount/PojoExampleITCase.java
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/wordcount/PojoExampleITCase.java
@@ -22,6 +22,9 @@ import org.apache.flink.streaming.examples.wordcount.PojoExample;
 import org.apache.flink.streaming.util.StreamingProgramTestBase;
 import org.apache.flink.test.testdata.WordCountData;
 
+/**
+ * Tests for {@link PojoExample}.
+ */
 public class PojoExampleITCase extends StreamingProgramTestBase {
 
 	protected String textPath;

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/wordcount/WordCountITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/wordcount/WordCountITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/wordcount/WordCountITCase.java
index 153a055..ef57794 100644
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/wordcount/WordCountITCase.java
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/wordcount/WordCountITCase.java
@@ -22,6 +22,9 @@ import org.apache.flink.streaming.examples.wordcount.WordCount;
 import org.apache.flink.streaming.util.StreamingProgramTestBase;
 import org.apache.flink.test.testdata.WordCountData;
 
+/**
+ * Tests for {@link WordCount}.
+ */
 public class WordCountITCase extends StreamingProgramTestBase {
 
 	protected String textPath;

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleScalaPrograms/windowing/TopSpeedWindowingExampleITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleScalaPrograms/windowing/TopSpeedWindowingExampleITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleScalaPrograms/windowing/TopSpeedWindowingExampleITCase.java
index de4fd1b..c174429 100644
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleScalaPrograms/windowing/TopSpeedWindowingExampleITCase.java
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleScalaPrograms/windowing/TopSpeedWindowingExampleITCase.java
@@ -17,10 +17,13 @@
 
 package org.apache.flink.streaming.test.exampleScalaPrograms.windowing;
 
-import org.apache.flink.streaming.scala.examples.windowing.TopSpeedWindowing;
 import org.apache.flink.streaming.examples.windowing.util.TopSpeedWindowingExampleData;
+import org.apache.flink.streaming.scala.examples.windowing.TopSpeedWindowing;
 import org.apache.flink.streaming.util.StreamingProgramTestBase;
 
+/**
+ * Tests for {@link TopSpeedWindowing}.
+ */
 public class TopSpeedWindowingExampleITCase extends StreamingProgramTestBase {
 	protected String textPath;
 	protected String resultPath;

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/socket/SocketWindowWordCountITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/socket/SocketWindowWordCountITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/socket/SocketWindowWordCountITCase.java
index c6f46e3..a09b22e 100644
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/socket/SocketWindowWordCountITCase.java
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/socket/SocketWindowWordCountITCase.java
@@ -18,8 +18,8 @@
 package org.apache.flink.streaming.test.socket;
 
 import org.apache.flink.configuration.ConfigConstants;
+import org.apache.flink.streaming.examples.socket.SocketWindowWordCount;
 import org.apache.flink.streaming.util.StreamingMultipleProgramsTestBase;
-
 import org.apache.flink.test.testdata.WordCountData;
 
 import org.junit.Test;
@@ -35,37 +35,39 @@ import java.net.Socket;
 
 import static org.junit.Assert.fail;
 
+/**
+ * Tests for {@link SocketWindowWordCount}.
+ */
 public class SocketWindowWordCountITCase extends StreamingMultipleProgramsTestBase {
-	
+
 	@Test
 	public void testJavaProgram() throws Exception {
 		InetAddress localhost = InetAddress.getByName("localhost");
-		
+
 		// suppress sysout messages from this example
 		final PrintStream originalSysout = System.out;
 		final PrintStream originalSyserr = System.err;
-		
+
 		final ByteArrayOutputStream errorMessages = new ByteArrayOutputStream();
-		
+
 		System.setOut(new PrintStream(new NullStream()));
 		System.setErr(new PrintStream(errorMessages));
-		
+
 		try {
 			try (ServerSocket server = new ServerSocket(0, 10, localhost)) {
-				
+
 				final ServerThread serverThread = new ServerThread(server);
 				serverThread.setDaemon(true);
 				serverThread.start();
-				
+
 				final int serverPort = server.getLocalPort();
 
-				org.apache.flink.streaming.examples.socket.SocketWindowWordCount.main(
-						new String[] { "--port", String.valueOf(serverPort) });
+				SocketWindowWordCount.main(new String[] { "--port", String.valueOf(serverPort) });
 
 				if (errorMessages.size() != 0) {
 					fail("Found error message: " + new String(errorMessages.toByteArray(), ConfigConstants.DEFAULT_CHARSET));
 				}
-				
+
 				serverThread.join();
 				serverThread.checkError();
 			}
@@ -104,7 +106,7 @@ public class SocketWindowWordCountITCase extends StreamingMultipleProgramsTestBa
 				if (errorMessages.size() != 0) {
 					fail("Found error message: " + new String(errorMessages.toByteArray(), ConfigConstants.DEFAULT_CHARSET));
 				}
-				
+
 				serverThread.join();
 				serverThread.checkError();
 			}
@@ -114,7 +116,7 @@ public class SocketWindowWordCountITCase extends StreamingMultipleProgramsTestBa
 			System.setErr(originalSyserr);
 		}
 	}
-	
+
 	// ------------------------------------------------------------------------
 
 	private static class ServerThread extends Thread {
@@ -122,19 +124,19 @@ public class SocketWindowWordCountITCase extends StreamingMultipleProgramsTestBa
 		private final ServerSocket serverSocket;
 
 		private volatile Throwable error;
-		
+
 		public ServerThread(ServerSocket serverSocket) {
 			super("Socket Server Thread");
-			
+
 			this.serverSocket = serverSocket;
 		}
 
 		@Override
 		public void run() {
 			try {
-				try (Socket socket = serverSocket.accept(); 
+				try (Socket socket = serverSocket.accept();
 						PrintWriter writer = new PrintWriter(socket.getOutputStream(), true)) {
-					
+
 					writer.println(WordCountData.TEXT);
 				}
 			}
@@ -142,16 +144,16 @@ public class SocketWindowWordCountITCase extends StreamingMultipleProgramsTestBa
 				this.error = t;
 			}
 		}
-		
+
 		public void checkError() throws IOException {
 			if (error != null) {
 				throw new IOException("Error in server thread: " + error.getMessage(), error);
 			}
 		}
 	}
-	
+
 	private static final class NullStream extends OutputStream {
-		
+
 		@Override
 		public void write(int b) {}
 	}

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/test/resources/log4j-test.properties
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/resources/log4j-test.properties b/flink-examples/flink-examples-streaming/src/test/resources/log4j-test.properties
index 555f17e..b3184ea 100644
--- a/flink-examples/flink-examples-streaming/src/test/resources/log4j-test.properties
+++ b/flink-examples/flink-examples-streaming/src/test/resources/log4j-test.properties
@@ -20,4 +20,4 @@ log4j.rootLogger=OFF, console
 
 log4j.appender.console=org.apache.log4j.ConsoleAppender
 log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n
\ No newline at end of file
+log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/test/scala/org/apache/flink/streaming/scala/examples/WindowJoinITCase.scala
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/scala/org/apache/flink/streaming/scala/examples/WindowJoinITCase.scala b/flink-examples/flink-examples-streaming/src/test/scala/org/apache/flink/streaming/scala/examples/WindowJoinITCase.scala
index 88953c5..93f262d 100644
--- a/flink-examples/flink-examples-streaming/src/test/scala/org/apache/flink/streaming/scala/examples/WindowJoinITCase.scala
+++ b/flink-examples/flink-examples-streaming/src/test/scala/org/apache/flink/streaming/scala/examples/WindowJoinITCase.scala
@@ -21,16 +21,14 @@ package org.apache.flink.streaming.scala.examples
 import java.io.File
 
 import org.apache.commons.io.FileUtils
-
-import org.apache.flink.streaming.api.scala._
 import org.apache.flink.core.fs.FileSystem.WriteMode
 import org.apache.flink.streaming.api.TimeCharacteristic
+import org.apache.flink.streaming.api.scala._
 import org.apache.flink.streaming.scala.examples.join.WindowJoin
-import org.apache.flink.streaming.scala.examples.join.WindowJoin.{Grade, Person, Salary}
+import org.apache.flink.streaming.scala.examples.join.WindowJoin.{Grade, Salary}
 import org.apache.flink.streaming.test.exampleJavaPrograms.join.WindowJoinData
 import org.apache.flink.streaming.util.StreamingMultipleProgramsTestBase
 import org.apache.flink.test.util.TestBaseUtils
-
 import org.junit.Test
 
 class WindowJoinITCase extends StreamingMultipleProgramsTestBase {

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-table/src/main/java/org/apache/flink/table/examples/java/WordCountSQL.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-table/src/main/java/org/apache/flink/table/examples/java/WordCountSQL.java b/flink-examples/flink-examples-table/src/main/java/org/apache/flink/table/examples/java/WordCountSQL.java
index 9e1b45e..c4a36f0 100644
--- a/flink-examples/flink-examples-table/src/main/java/org/apache/flink/table/examples/java/WordCountSQL.java
+++ b/flink-examples/flink-examples-table/src/main/java/org/apache/flink/table/examples/java/WordCountSQL.java
@@ -15,6 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.flink.table.examples.java;
 
 import org.apache.flink.api.java.DataSet;
@@ -26,11 +27,10 @@ import org.apache.flink.table.api.java.BatchTableEnvironment;
 /**
  * Simple example that shows how the Batch SQL API is used in Java.
  *
- * This example shows how to:
+ * <p>This example shows how to:
  *  - Convert DataSets to Tables
  *  - Register a Table under a name
  *  - Run a SQL query on the registered Table
- *
  */
 public class WordCountSQL {
 
@@ -65,13 +65,12 @@ public class WordCountSQL {
 	//     USER DATA TYPES
 	// *************************************************************************
 
-	public static class WC {
+	private static class WC {
 		public String word;
 		public long frequency;
 
 		// public constructor to make it a Flink POJO
-		public WC() {
-		}
+		public WC() {}
 
 		public WC(String word, long frequency) {
 			this.word = word;
@@ -83,5 +82,4 @@ public class WordCountSQL {
 			return "WC " + word + " " + frequency;
 		}
 	}
-
 }

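A rough, hypothetical sketch of the three steps listed in the WordCountSQL javadoc above (convert a DataSet, register it, query it). The class name, sample data, and table name are illustrative only, and the sql() entry point reflects the Table API around the time of this commit; it may be named differently in other Flink versions. Tuples are used instead of the WC POJO, which this commit makes private to the example class.

    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.tuple.Tuple2;
    import org.apache.flink.table.api.Table;
    import org.apache.flink.table.api.TableEnvironment;
    import org.apache.flink.table.api.java.BatchTableEnvironment;
    import org.apache.flink.types.Row;

    public class WordCountSQLSketch {

        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
            BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(env);

            // a DataSet to convert into a Table
            DataSet<Tuple2<String, Long>> input = env.fromElements(
                    Tuple2.of("hello", 1L), Tuple2.of("world", 1L), Tuple2.of("hello", 1L));

            // register the Table under a name, mapping tuple fields to column names
            tEnv.registerDataSet("WordCount", input, "word, frequency");

            // run a SQL query on the registered Table
            Table result = tEnv.sql(
                    "SELECT word, SUM(frequency) AS frequency FROM WordCount GROUP BY word");

            tEnv.toDataSet(result, Row.class).print();
        }
    }
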
http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-table/src/main/java/org/apache/flink/table/examples/java/WordCountTable.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-table/src/main/java/org/apache/flink/table/examples/java/WordCountTable.java b/flink-examples/flink-examples-table/src/main/java/org/apache/flink/table/examples/java/WordCountTable.java
index 1ee8c12..8606cd8 100644
--- a/flink-examples/flink-examples-table/src/main/java/org/apache/flink/table/examples/java/WordCountTable.java
+++ b/flink-examples/flink-examples-table/src/main/java/org/apache/flink/table/examples/java/WordCountTable.java
@@ -15,6 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.flink.table.examples.java;
 
 import org.apache.flink.api.java.DataSet;
@@ -24,13 +25,12 @@ import org.apache.flink.table.api.TableEnvironment;
 import org.apache.flink.table.api.java.BatchTableEnvironment;
 
 /**
-  * Simple example for demonstrating the use of the Table API for a Word Count in Java.
-  *
-  * This example shows how to:
-  *  - Convert DataSets to Tables
-  *  - Apply group, aggregate, select, and filter operations
-  *
-  */
+ * Simple example for demonstrating the use of the Table API for a Word Count in Java.
+ *
+ * <p>This example shows how to:
+ *  - Convert DataSets to Tables
+ *  - Apply group, aggregate, select, and filter operations
+ */
 public class WordCountTable {
 
 	// *************************************************************************
@@ -62,14 +62,12 @@ public class WordCountTable {
 	//     USER DATA TYPES
 	// *************************************************************************
 
-	public static class WC {
+	private static class WC {
 		public String word;
 		public long frequency;
 
 		// public constructor to make it a Flink POJO
-		public WC() {
-
-		}
+		public WC() {}
 
 		public WC(String word, long frequency) {
 			this.word = word;
@@ -81,5 +79,4 @@ public class WordCountTable {
 			return "WC " + word + " " + frequency;
 		}
 	}
-
 }

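In the same spirit, a hypothetical sketch of the group, aggregate, select, and filter operations listed in the WordCountTable javadoc above; names and sample data are illustrative, and the string-expression syntax follows the Table API as of this commit.

    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.tuple.Tuple2;
    import org.apache.flink.table.api.Table;
    import org.apache.flink.table.api.TableEnvironment;
    import org.apache.flink.table.api.java.BatchTableEnvironment;
    import org.apache.flink.types.Row;

    public class WordCountTableSketch {

        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
            BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(env);

            DataSet<Tuple2<String, Long>> input = env.fromElements(
                    Tuple2.of("hello", 1L), Tuple2.of("world", 1L), Tuple2.of("hello", 1L));

            // group, aggregate, select, and filter, as listed in the javadoc
            Table filtered = tEnv.fromDataSet(input, "word, frequency")
                    .groupBy("word")
                    .select("word, frequency.sum as frequency")
                    .filter("frequency = 2");

            tEnv.toDataSet(filtered, Row.class).print(); // prints: hello,2
        }
    }
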
http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/pom.xml
----------------------------------------------------------------------
diff --git a/flink-examples/pom.xml b/flink-examples/pom.xml
index 644d9eb..38feb3d 100644
--- a/flink-examples/pom.xml
+++ b/flink-examples/pom.xml
@@ -32,6 +32,12 @@ under the License.
 	<name>flink-examples</name>
 	<packaging>pom</packaging>
 
+	<modules>
+		<module>flink-examples-batch</module>
+		<module>flink-examples-streaming</module>
+		<module>flink-examples-table</module>
+	</modules>
+
 	<dependencies>
 
 		<!-- Flink dependencies -->
@@ -65,9 +71,43 @@ under the License.
 
 	</dependencies>
 
-	<modules>
-		<module>flink-examples-batch</module>
-		<module>flink-examples-streaming</module>
-		<module>flink-examples-table</module>
-	</modules>
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-checkstyle-plugin</artifactId>
+				<version>2.17</version>
+				<dependencies>
+					<dependency>
+						<groupId>com.puppycrawl.tools</groupId>
+						<artifactId>checkstyle</artifactId>
+						<version>6.19</version>
+					</dependency>
+				</dependencies>
+				<configuration>
+					<configLocation>/tools/maven/strict-checkstyle.xml</configLocation>
+					<suppressionsLocation>/tools/maven/suppressions.xml</suppressionsLocation>
+					<includeTestSourceDirectory>true</includeTestSourceDirectory>
+					<logViolationsToConsole>true</logViolationsToConsole>
+					<failOnViolation>true</failOnViolation>
+				</configuration>
+				<executions>
+					<!--
+					Execute checkstyle after compilation but before tests.
+
+					This ensures that any parsing or type checking errors are from
+					javac, so they look as expected. Beyond that, we want to
+					fail as early as possible.
+					-->
+					<execution>
+						<phase>test-compile</phase>
+						<goals>
+							<goal>check</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
+
 </project>


[4/7] flink git commit: [FLINK-6707] [examples] Activate strict checkstyle for flink-examples

Posted by ch...@apache.org.
[FLINK-6707] [examples] Activate strict checkstyle for flink-examples


Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/789ed8a8
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/789ed8a8
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/789ed8a8

Branch: refs/heads/master
Commit: 789ed8a8246d140e1621a5860645a747132d6618
Parents: d481f29
Author: Greg Hogan <co...@greghogan.com>
Authored: Wed May 24 13:24:02 2017 -0400
Committer: zentol <ch...@apache.org>
Committed: Thu May 25 09:48:30 2017 +0200

----------------------------------------------------------------------
 .../flink/examples/java/clustering/KMeans.java  |  31 +-
 .../java/clustering/util/KMeansData.java        |   4 +-
 .../clustering/util/KMeansDataGenerator.java    |  68 +-
 .../flink/examples/java/distcp/DistCp.java      |  12 +-
 .../examples/java/distcp/FileCopyTask.java      |   9 +-
 .../java/distcp/FileCopyTaskInputFormat.java    |   6 +-
 .../java/distcp/FileCopyTaskInputSplit.java     |   6 +-
 .../java/graph/ConnectedComponents.java         |  74 +-
 .../examples/java/graph/EnumTriangles.java      |  70 +-
 .../flink/examples/java/graph/PageRank.java     |  84 +--
 .../java/graph/TransitiveClosureNaive.java      |  22 +-
 .../graph/util/ConnectedComponentsData.java     |  18 +-
 .../java/graph/util/EnumTrianglesData.java      |  19 +-
 .../java/graph/util/EnumTrianglesDataTypes.java | 128 ++--
 .../examples/java/graph/util/PageRankData.java  |  22 +-
 .../java/misc/CollectionExecutionExample.java   |  50 +-
 .../flink/examples/java/misc/PiEstimation.java  |  35 +-
 .../examples/java/ml/LinearRegression.java      |  80 +--
 .../java/ml/util/LinearRegressionData.java      |   2 +-
 .../ml/util/LinearRegressionDataGenerator.java  |  21 +-
 .../relational/EmptyFieldsCountAccumulator.java |  28 +-
 .../examples/java/relational/TPCHQuery10.java   | 107 ++-
 .../examples/java/relational/TPCHQuery3.java    | 183 +++--
 .../java/relational/WebLogAnalysis.java         |  72 +-
 .../java/relational/util/WebLogData.java        | 709 +++++++++----------
 .../relational/util/WebLogDataGenerator.java    |  27 +-
 .../examples/java/wordcount/WordCount.java      |  33 +-
 .../examples/java/wordcount/WordCountPojo.java  |  38 +-
 .../examples/scala/graph/EnumTriangles.scala    |  11 +-
 .../examples/scala/graph/PageRankBasic.scala    |   3 +-
 .../examples/scala/relational/TPCHQuery10.scala |   3 +-
 .../examples/scala/relational/TPCHQuery3.scala  |   3 +-
 .../examples/async/AsyncIOExample.java          |  13 +-
 .../examples/iteration/IterateExample.java      |  14 +-
 .../iteration/util/IterateExampleData.java      |   3 +
 .../streaming/examples/join/WindowJoin.java     |  17 +-
 .../streaming/examples/kafka/ReadFromKafka.java |   5 +-
 .../examples/kafka/WriteIntoKafka.java          |  12 +-
 .../ml/IncrementalLearningSkeleton.java         |   8 +-
 .../util/IncrementalLearningSkeletonData.java   |   3 +
 .../examples/sideoutput/SideOutputExample.java  |  16 +-
 .../examples/socket/SocketWindowWordCount.java  |   4 +-
 .../examples/twitter/TwitterExample.java        |  31 +-
 .../twitter/util/TwitterExampleData.java        |   4 +-
 .../examples/utils/ThrottledIterator.java       |   4 +-
 .../GroupedProcessingTimeWindowExample.java     |  40 +-
 .../examples/windowing/SessionWindowing.java    |   6 +-
 .../examples/windowing/TopSpeedWindowing.java   |   2 +-
 .../examples/windowing/WindowWordCount.java     |  12 +-
 .../windowing/util/SessionWindowingData.java    |   3 +
 .../util/TopSpeedWindowingExampleData.java      |   3 +
 .../examples/wordcount/PojoExample.java         |  14 +-
 .../streaming/examples/wordcount/WordCount.java |  16 +-
 .../scala/examples/join/WindowJoin.scala        |   2 +-
 .../iteration/IterateExampleITCase.java         |   4 +-
 .../join/WindowJoinData.java                    |   2 +-
 .../join/WindowJoinITCase.java                  |  19 +-
 .../ml/IncrementalLearningSkeletonITCase.java   |   3 +
 .../twitter/TwitterStreamITCase.java            |   3 +
 .../windowing/SessionWindowingITCase.java       |   3 +
 .../TopSpeedWindowingExampleITCase.java         |   5 +-
 .../windowing/WindowWordCountITCase.java        |   3 +
 .../wordcount/PojoExampleITCase.java            |   3 +
 .../wordcount/WordCountITCase.java              |   3 +
 .../TopSpeedWindowingExampleITCase.java         |   5 +-
 .../socket/SocketWindowWordCountITCase.java     |  42 +-
 .../src/test/resources/log4j-test.properties    |   2 +-
 .../scala/examples/WindowJoinITCase.scala       |   6 +-
 .../flink/table/examples/java/WordCountSQL.java |  10 +-
 .../table/examples/java/WordCountTable.java     |  21 +-
 flink-examples/pom.xml                          |  50 +-
 71 files changed, 1251 insertions(+), 1143 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/clustering/KMeans.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/clustering/KMeans.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/clustering/KMeans.java
index 8e51df8..101eda3 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/clustering/KMeans.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/clustering/KMeans.java
@@ -18,27 +18,26 @@
 
 package org.apache.flink.examples.java.clustering;
 
-import java.io.Serializable;
-import java.util.Collection;
-
 import org.apache.flink.api.common.functions.MapFunction;
 import org.apache.flink.api.common.functions.ReduceFunction;
 import org.apache.flink.api.common.functions.RichMapFunction;
+import org.apache.flink.api.java.DataSet;
+import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFields;
+import org.apache.flink.api.java.operators.IterativeDataSet;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.api.java.tuple.Tuple3;
 import org.apache.flink.api.java.utils.ParameterTool;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.examples.java.clustering.util.KMeansData;
-import org.apache.flink.api.java.DataSet;
-import org.apache.flink.api.java.ExecutionEnvironment;
-import org.apache.flink.api.java.operators.IterativeDataSet;
+
+import java.io.Serializable;
+import java.util.Collection;
 
 /**
  * This example implements a basic K-Means clustering algorithm.
  *
- * <p>
- * K-Means is an iterative clustering algorithm and works as follows:<br>
+ * <p>K-Means is an iterative clustering algorithm and works as follows:<br>
  * K-Means is given a set of data points to be clustered and an initial set of <i>K</i> cluster centers.
  * In each iteration, the algorithm computes the distance of each data point to each cluster center.
  * Each point is assigned to the cluster center which is closest to it.
@@ -48,13 +47,11 @@ import org.apache.flink.api.java.operators.IterativeDataSet;
  * or if cluster centers do not (significantly) move in an iteration.<br>
  * This is the Wikipedia entry for the <a href="http://en.wikipedia.org/wiki/K-means_clustering">K-Means Clustering algorithm</a>.
  *
- * <p>
- * This implementation works on two-dimensional data points. <br>
+ * <p>This implementation works on two-dimensional data points. <br>
  * It computes an assignment of data points to cluster centers, i.e.,
  * each data point is annotated with the id of the final cluster (center) it belongs to.
  *
- * <p>
- * Input files are plain text files and must be formatted as follows:
+ * <p>Input files are plain text files and must be formatted as follows:
  * <ul>
  * <li>Data points are represented as two double values separated by a blank character.
  * Data points are separated by newline characters.<br>
@@ -63,12 +60,10 @@ import org.apache.flink.api.java.operators.IterativeDataSet;
  * For example <code>"1 6.2 3.2\n2 2.9 5.7\n"</code> gives two centers (id=1, x=6.2, y=3.2) and (id=2, x=2.9, y=5.7).
  * </ul>
  *
- * <p>
- * Usage: <code>KMeans --points &lt;path&gt; --centroids &lt;path&gt; --output &lt;path&gt; --iterations &lt;n&gt;</code><br>
+ * <p>Usage: <code>KMeans --points &lt;path&gt; --centroids &lt;path&gt; --output &lt;path&gt; --iterations &lt;n&gt;</code><br>
  * If no parameters are provided, the program is run with default data from {@link org.apache.flink.examples.java.clustering.util.KMeansData} and 10 iterations.
  *
- * <p>
- * This example shows how to use:
+ * <p>This example shows how to use:
  * <ul>
  * <li>Bulk iterations
  * <li>Broadcast variables in bulk iterations
@@ -187,7 +182,7 @@ public class KMeans {
 		}
 
 		public double euclideanDistance(Point other) {
-			return Math.sqrt((x-other.x)*(x-other.x) + (y-other.y)*(y-other.y));
+			return Math.sqrt((x - other.x) * (x - other.x) + (y - other.y) * (y - other.y));
 		}
 
 		public void clear() {
@@ -210,7 +205,7 @@ public class KMeans {
 		public Centroid() {}
 
 		public Centroid(int id, double x, double y) {
-			super(x,y);
+			super(x, y);
 			this.id = id;
 		}
 

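The assignment step described in the KMeans javadoc above (each point adopts the id of the closest cluster center) reduces to a nearest-center search with the euclidean distance from Point#euclideanDistance. A minimal plain-Java sketch with hypothetical names and toy data:

    public class KMeansAssignmentSketch {

        /** Returns the index of the closest center for a two-dimensional point. */
        static int nearestCenter(double x, double y, double[][] centers) {
            int best = 0;
            double bestDistance = Double.MAX_VALUE;
            for (int i = 0; i < centers.length; i++) {
                double dx = x - centers[i][0];
                double dy = y - centers[i][1];
                // same euclidean distance as in the diff above
                double distance = Math.sqrt(dx * dx + dy * dy);
                if (distance < bestDistance) {
                    bestDistance = distance;
                    best = i;
                }
            }
            return best;
        }

        public static void main(String[] args) {
            // the two centers from the javadoc example: (6.2, 3.2) and (2.9, 5.7)
            double[][] centers = { {6.2, 3.2}, {2.9, 5.7} };
            // prints 1: the point (3.0, 5.0) is much closer to the second center
            System.out.println(nearestCenter(3.0, 5.0, centers));
        }
    }
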
http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/clustering/util/KMeansData.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/clustering/util/KMeansData.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/clustering/util/KMeansData.java
index e165612..24c30a8 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/clustering/util/KMeansData.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/clustering/util/KMeansData.java
@@ -92,7 +92,7 @@ public class KMeansData {
 		}
 		return env.fromCollection(centroidList);
 	}
-	
+
 	public static DataSet<Point> getDefaultPointDataSet(ExecutionEnvironment env) {
 		List<Point> pointList = new LinkedList<Point>();
 		for (Object[] point : POINTS) {
@@ -100,5 +100,5 @@ public class KMeansData {
 		}
 		return env.fromCollection(pointList);
 	}
-	
+
 }

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/clustering/util/KMeansDataGenerator.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/clustering/util/KMeansDataGenerator.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/clustering/util/KMeansDataGenerator.java
index 8f48d0a..9f7c98d 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/clustering/util/KMeansDataGenerator.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/clustering/util/KMeansDataGenerator.java
@@ -16,9 +16,11 @@
  * limitations under the License.
  */
 
-
 package org.apache.flink.examples.java.clustering.util;
 
+import org.apache.flink.api.java.utils.ParameterTool;
+import org.apache.flink.examples.java.clustering.KMeans;
+
 import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileWriter;
@@ -27,18 +29,15 @@ import java.text.DecimalFormat;
 import java.util.Locale;
 import java.util.Random;
 
-import org.apache.flink.api.java.utils.ParameterTool;
-import org.apache.flink.examples.java.clustering.KMeans;
-
 /**
  * Generates data for the {@link KMeans} example program.
  */
 public class KMeansDataGenerator {
-	
+
 	static {
 		Locale.setDefault(Locale.US);
 	}
-	
+
 	private static final String CENTERS_FILE = "centers";
 	private static final String POINTS_FILE = "points";
 	private static final long DEFAULT_SEED = 4650285087650871364L;
@@ -50,14 +49,14 @@ public class KMeansDataGenerator {
 
 	/**
 	 * Main method to generate data for the {@link KMeans} example program.
-	 * <p>
-	 * The generator creates to files:
+	 *
+	 * <p>The generator creates two files:
 	 * <ul>
 	 * <li><code>&lt; output-path &gt;/points</code> for the data points
 	 * <li><code>&lt; output-path &gt;/centers</code> for the cluster centers
-	 * </ul> 
-	 * 
-	 * @param args 
+	 * </ul>
+	 *
+	 * @param args
 	 * <ol>
 	 * <li>Int: Number of data points
 	 * <li>Int: Number of cluster centers
@@ -87,22 +86,21 @@ public class KMeansDataGenerator {
 		final double range = params.getDouble("range", DEFAULT_VALUE_RANGE);
 		final long firstSeed = params.getLong("seed", DEFAULT_SEED);
 
-		
 		final double absoluteStdDev = stddev * range;
 		final Random random = new Random(firstSeed);
-		
+
 		// the means around which data points are distributed
 		final double[][] means = uniformRandomCenters(random, k, DIMENSIONALITY, range);
-		
+
 		// write the points out
 		BufferedWriter pointsOut = null;
 		try {
-			pointsOut = new BufferedWriter(new FileWriter(new File(outDir+"/"+POINTS_FILE)));
+			pointsOut = new BufferedWriter(new FileWriter(new File(outDir + "/" + POINTS_FILE)));
 			StringBuilder buffer = new StringBuilder();
-			
+
 			double[] point = new double[DIMENSIONALITY];
 			int nextCentroid = 0;
-			
+
 			for (int i = 1; i <= numDataPoints; i++) {
 				// generate a point for the current centroid
 				double[] centroid = means[nextCentroid];
@@ -118,15 +116,15 @@ public class KMeansDataGenerator {
 				pointsOut.close();
 			}
 		}
-		
+
 		// write the uniformly distributed centers to a file
 		BufferedWriter centersOut = null;
 		try {
-			centersOut = new BufferedWriter(new FileWriter(new File(outDir+"/"+CENTERS_FILE)));
+			centersOut = new BufferedWriter(new FileWriter(new File(outDir + "/" + CENTERS_FILE)));
 			StringBuilder buffer = new StringBuilder();
-			
+
 			double[][] centers = uniformRandomCenters(random, k, DIMENSIONALITY, range);
-			
+
 			for (int i = 0; i < k; i++) {
 				writeCenter(i + 1, centers[i], buffer, centersOut);
 			}
@@ -136,41 +134,41 @@ public class KMeansDataGenerator {
 				centersOut.close();
 			}
 		}
-		
-		System.out.println("Wrote "+numDataPoints+" data points to "+outDir+"/"+POINTS_FILE);
-		System.out.println("Wrote "+k+" cluster centers to "+outDir+"/"+CENTERS_FILE);
+
+		System.out.println("Wrote " + numDataPoints + " data points to " + outDir + "/" + POINTS_FILE);
+		System.out.println("Wrote " + k + " cluster centers to " + outDir + "/" + CENTERS_FILE);
 	}
-	
+
 	private static double[][] uniformRandomCenters(Random rnd, int num, int dimensionality, double range) {
 		final double halfRange = range / 2;
 		final double[][] points = new double[num][dimensionality];
-		
+
 		for (int i = 0; i < num; i++) {
-			for (int dim = 0; dim < dimensionality; dim ++) {
+			for (int dim = 0; dim < dimensionality; dim++) {
 				points[i][dim] = (rnd.nextDouble() * range) - halfRange;
 			}
 		}
 		return points;
 	}
-	
+
 	private static void writePoint(double[] coordinates, StringBuilder buffer, BufferedWriter out) throws IOException {
 		buffer.setLength(0);
-		
+
 		// write coordinates
 		for (int j = 0; j < coordinates.length; j++) {
 			buffer.append(FORMAT.format(coordinates[j]));
-			if(j < coordinates.length - 1) {
+			if (j < coordinates.length - 1) {
 				buffer.append(DELIMITER);
 			}
 		}
-		
+
 		out.write(buffer.toString());
 		out.newLine();
 	}
-	
+
 	private static void writeCenter(long id, double[] coordinates, StringBuilder buffer, BufferedWriter out) throws IOException {
 		buffer.setLength(0);
-		
+
 		// write id
 		buffer.append(id);
 		buffer.append(DELIMITER);
@@ -178,11 +176,11 @@ public class KMeansDataGenerator {
 		// write coordinates
 		for (int j = 0; j < coordinates.length; j++) {
 			buffer.append(FORMAT.format(coordinates[j]));
-			if(j < coordinates.length - 1) {
+			if (j < coordinates.length - 1) {
 				buffer.append(DELIMITER);
 			}
 		}
-		
+
 		out.write(buffer.toString());
 		out.newLine();
 	}

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/distcp/DistCp.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/distcp/DistCp.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/distcp/DistCp.java
index 82f1c52..a358490 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/distcp/DistCp.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/distcp/DistCp.java
@@ -18,8 +18,6 @@
 
 package org.apache.flink.examples.java.distcp;
 
-import org.apache.commons.io.IOUtils;
-
 import org.apache.flink.api.common.accumulators.LongCounter;
 import org.apache.flink.api.common.functions.RichFlatMapFunction;
 import org.apache.flink.api.java.DataSet;
@@ -37,6 +35,7 @@ import org.apache.flink.core.fs.FileSystem;
 import org.apache.flink.core.fs.Path;
 import org.apache.flink.util.Collector;
 
+import org.apache.commons.io.IOUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -52,12 +51,12 @@ import java.util.Map;
  * (see <a href="http://hadoop.apache.org/docs/r1.2.1/distcp.html">http://hadoop.apache.org/docs/r1.2.1/distcp.html</a>)
  * with a dynamic input format.
  * Note that this tool does not deal with retriability. Additionally, empty directories are not copied over.
- * <p>
- * When running locally, local file systems paths can be used.
+ *
+ * <p>When running locally, local file system paths can be used.
  * However, in a distributed environment HDFS paths must be provided both as input and output.
  */
 public class DistCp {
-	
+
 	private static final Logger LOGGER = LoggerFactory.getLogger(DistCp.class);
 	public static final String BYTES_COPIED_CNT_NAME = "BYTES_COPIED";
 	public static final String FILES_COPIED_CNT_NAME = "FILES_COPIED";
@@ -100,7 +99,6 @@ public class DistCp {
 				new FileCopyTaskInputFormat(tasks),
 				new GenericTypeInfo<>(FileCopyTask.class), "fileCopyTasks");
 
-
 		FlatMapOperator<FileCopyTask, Object> res = inputTasks.flatMap(new RichFlatMapFunction<FileCopyTask, Object>() {
 
 			private static final long serialVersionUID = 1109254230243989929L;
@@ -139,7 +137,7 @@ public class DistCp {
 					IOUtils.closeQuietly(inputStream);
 					IOUtils.closeQuietly(outputStream);
 				}
-				fileCounter.add(1l);
+				fileCounter.add(1L);
 			}
 		});
 

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/distcp/FileCopyTask.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/distcp/FileCopyTask.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/distcp/FileCopyTask.java
index 7f38a8b..4a8f38b 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/distcp/FileCopyTask.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/distcp/FileCopyTask.java
@@ -18,18 +18,19 @@
 
 package org.apache.flink.examples.java.distcp;
 
-import org.apache.commons.lang3.StringUtils;
 import org.apache.flink.core.fs.Path;
 
+import org.apache.commons.lang3.StringUtils;
+
 import java.io.Serializable;
 
 /**
- * A Java POJO that represents a task for copying a single file
+ * A Java POJO that represents a task for copying a single file.
  */
 public class FileCopyTask implements Serializable {
-	
+
 	private static final long serialVersionUID = -8760082278978316032L;
-	
+
 	private final Path path;
 	private final String relativePath;
 

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/distcp/FileCopyTaskInputFormat.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/distcp/FileCopyTaskInputFormat.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/distcp/FileCopyTaskInputFormat.java
index d6e6713..dfd9bf0 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/distcp/FileCopyTaskInputFormat.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/distcp/FileCopyTaskInputFormat.java
@@ -35,14 +35,14 @@ import java.util.Queue;
 
 /**
  * An implementation of an input format that dynamically assigns {@code FileCopyTask} to the mappers
- * that have finished previously assigned tasks
+ * that have finished previously assigned tasks.
  */
 public class FileCopyTaskInputFormat implements InputFormat<FileCopyTask, FileCopyTaskInputSplit> {
 
 	private static final long serialVersionUID = -644394866425221151L;
-	
+
 	private static final Logger LOGGER = LoggerFactory.getLogger(FileCopyTaskInputFormat.class);
-	
+
 
 	private final List<FileCopyTask> tasks;
 

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/distcp/FileCopyTaskInputSplit.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/distcp/FileCopyTaskInputSplit.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/distcp/FileCopyTaskInputSplit.java
index 33943b6..b7ec0c9 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/distcp/FileCopyTaskInputSplit.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/distcp/FileCopyTaskInputSplit.java
@@ -21,12 +21,12 @@ package org.apache.flink.examples.java.distcp;
 import org.apache.flink.core.io.InputSplit;
 
 /**
- * Implementation of {@code InputSplit} for copying files
+ * Implementation of {@code InputSplit} for copying files.
  */
 public class FileCopyTaskInputSplit implements InputSplit {
-	
+
 	private static final long serialVersionUID = -7621656017747660450L;
-	
+
 	private final FileCopyTask task;
 	private final int splitNumber;
 

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/ConnectedComponents.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/ConnectedComponents.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/ConnectedComponents.java
index 3bd6522..9568b31 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/ConnectedComponents.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/ConnectedComponents.java
@@ -16,69 +16,63 @@
  * limitations under the License.
  */
 
-
 package org.apache.flink.examples.java.graph;
 
 import org.apache.flink.api.common.functions.FlatJoinFunction;
 import org.apache.flink.api.common.functions.FlatMapFunction;
 import org.apache.flink.api.common.functions.JoinFunction;
 import org.apache.flink.api.common.functions.MapFunction;
+import org.apache.flink.api.java.DataSet;
+import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.api.java.aggregation.Aggregations;
 import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFields;
 import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFieldsFirst;
 import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFieldsSecond;
+import org.apache.flink.api.java.operators.DeltaIteration;
 import org.apache.flink.api.java.tuple.Tuple1;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.api.java.utils.ParameterTool;
 import org.apache.flink.examples.java.graph.util.ConnectedComponentsData;
 import org.apache.flink.util.Collector;
-import org.apache.flink.api.java.DataSet;
-import org.apache.flink.api.java.operators.DeltaIteration;
-import org.apache.flink.api.java.ExecutionEnvironment;
 
 /**
  * An implementation of the connected components algorithm, using a delta iteration.
- * 
- * <p>
- * Initially, the algorithm assigns each vertex an unique ID. In each step, a vertex picks the minimum of its own ID and its
+ *
+ * <p>Initially, the algorithm assigns each vertex a unique ID. In each step, a vertex picks the minimum of its own ID and its
  * neighbors' IDs, as its new ID and tells its neighbors about its new ID. After the algorithm has completed, all vertices in the
  * same component will have the same ID.
- * 
- * <p>
- * A vertex whose component ID did not change needs not propagate its information in the next step. Because of that,
+ *
+ * <p>A vertex whose component ID did not change need not propagate its information in the next step. Because of that,
  * the algorithm is easily expressible via a delta iteration. We here model the solution set as the vertices with
  * their current component ids, and the workset as the changed vertices. Because we see all vertices initially as
  * changed, the initial workset and the initial solution set are identical. Also, the delta to the solution set
  * is consequently also the next workset.<br>
- * 
- * <p>
- * Input files are plain text files and must be formatted as follows:
+ *
+ * <p>Input files are plain text files and must be formatted as follows:
  * <ul>
- * <li>Vertices represented as IDs and separated by new-line characters.<br> 
+ * <li>Vertices represented as IDs and separated by new-line characters.<br>
  * For example <code>"1\n2\n12\n42\n63"</code> gives five vertices (1), (2), (12), (42), and (63).
- * <li>Edges are represented as pairs for vertex IDs which are separated by space 
+ * <li>Edges are represented as pairs of vertex IDs which are separated by space
  * characters. Edges are separated by new-line characters.<br>
  * For example <code>"1 2\n2 12\n1 12\n42 63"</code> gives four (undirected) edges (1)-(2), (2)-(12), (1)-(12), and (42)-(63).
  * </ul>
- * 
- * <p>
- * Usage: <code>ConnectedComponents --vertices &lt;path&gt; --edges &lt;path&gt; --output &lt;path&gt; --iterations &lt;n&gt;</code><br>
- * If no parameters are provided, the program is run with default data from {@link org.apache.flink.examples.java.graph.util.ConnectedComponentsData} and 10 iterations. 
- * 
- * <p>
- * This example shows how to use:
+ *
+ * <p>Usage: <code>ConnectedComponents --vertices &lt;path&gt; --edges &lt;path&gt; --output &lt;path&gt; --iterations &lt;n&gt;</code><br>
+ * If no parameters are provided, the program is run with default data from {@link org.apache.flink.examples.java.graph.util.ConnectedComponentsData} and 10 iterations.
+ *
+ * <p>This example shows how to use:
  * <ul>
  * <li>Delta Iterations
- * <li>Generic-typed Functions 
+ * <li>Generic-typed Functions
  * </ul>
  */
 @SuppressWarnings("serial")
 public class ConnectedComponents {
-	
+
 	// *************************************************************************
 	//     PROGRAM
 	// *************************************************************************
-	
+
 	public static void main(String... args) throws Exception {
 
 		// Checking input parameters
@@ -91,19 +85,19 @@ public class ConnectedComponents {
 
 		// make parameters available in the web interface
 		env.getConfig().setGlobalJobParameters(params);
-		
+
 		// read vertex and edge data
 		DataSet<Long> vertices = getVertexDataSet(env, params);
 		DataSet<Tuple2<Long, Long>> edges = getEdgeDataSet(env, params).flatMap(new UndirectEdge());
-		
+
 		// assign the initial components (equal to the vertex id)
 		DataSet<Tuple2<Long, Long>> verticesWithInitialId =
 			vertices.map(new DuplicateValue<Long>());
-				
+
 		// open a delta iteration
 		DeltaIteration<Tuple2<Long, Long>, Tuple2<Long, Long>> iteration =
 				verticesWithInitialId.iterateDelta(verticesWithInitialId, maxIterations, 0);
-		
+
 		// apply the step logic: join with the edges, select the minimum neighbor, update if the component of the candidate is smaller
 		DataSet<Tuple2<Long, Long>> changes = iteration.getWorkset().join(edges).where(0).equalTo(0).with(new NeighborWithComponentIDJoin())
 				.groupBy(0).aggregate(Aggregations.MIN, 1)
@@ -112,7 +106,7 @@ public class ConnectedComponents {
 
 		// close the delta iteration (delta and new workset are identical)
 		DataSet<Tuple2<Long, Long>> result = iteration.closeWith(changes, changes);
-		
+
 		// emit result
 		if (params.has("output")) {
 			result.writeAsCsv(params.get("output"), "\n", " ");
@@ -123,29 +117,29 @@ public class ConnectedComponents {
 			result.print();
 		}
 	}
-	
+
 	// *************************************************************************
 	//     USER FUNCTIONS
 	// *************************************************************************
-	
+
 	/**
 	 * Function that turns a value into a 2-tuple where both fields are that value.
 	 */
 	@ForwardedFields("*->f0")
 	public static final class DuplicateValue<T> implements MapFunction<T, Tuple2<T, T>> {
-		
+
 		@Override
 		public Tuple2<T, T> map(T vertex) {
 			return new Tuple2<T, T>(vertex, vertex);
 		}
 	}
-	
+
 	/**
 	 * Undirects edges by emitting for each input edge the input edge itself and an inverted version.
 	 */
 	public static final class UndirectEdge implements FlatMapFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {
 		Tuple2<Long, Long> invertedEdge = new Tuple2<Long, Long>();
-		
+
 		@Override
 		public void flatMap(Tuple2<Long, Long> edge, Collector<Tuple2<Long, Long>> out) {
 			invertedEdge.f0 = edge.f1;
@@ -154,7 +148,7 @@ public class ConnectedComponents {
 			out.collect(invertedEdge);
 		}
 	}
-	
+
 	/**
 	 * UDF that joins a (Vertex-ID, Component-ID) pair that represents the current component that
 	 * a vertex is associated with, with a (Source-Vertex-ID, Target-VertexID) edge. The function
@@ -169,9 +163,11 @@ public class ConnectedComponents {
 			return new Tuple2<Long, Long>(edge.f1, vertexWithComponent.f1);
 		}
 	}
-	
-
 
+	/**
+	 * Emit the candidate (Vertex-ID, Component-ID) pair if and only if the
+	 * candidate component ID is less than the vertex's current component ID.
+	 */
 	@ForwardedFieldsFirst("*")
 	public static final class ComponentIdFilter implements FlatJoinFunction<Tuple2<Long, Long>, Tuple2<Long, Long>, Tuple2<Long, Long>> {
 
@@ -211,6 +207,4 @@ public class ConnectedComponents {
 			return ConnectedComponentsData.getDefaultEdgeDataSet(env);
 		}
 	}
-	
-	
 }

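Outside of Flink, the min-id propagation that the delta iteration above performs, including the rule from the new ComponentIdFilter javadoc (emit only strictly smaller candidate ids), can be sketched in a few lines of plain Java. The class name and sample edges are hypothetical:

    import java.util.HashMap;
    import java.util.Map;

    public class MinPropagationSketch {

        public static void main(String[] args) {
            // the edges from the javadoc example: (1)-(2), (2)-(12), (1)-(12), (42)-(63)
            long[][] edges = { {1, 2}, {2, 12}, {1, 12}, {42, 63} };

            // initially every vertex is its own component, as in DuplicateValue
            Map<Long, Long> component = new HashMap<>();
            for (long[] e : edges) {
                component.putIfAbsent(e[0], e[0]);
                component.putIfAbsent(e[1], e[1]);
            }

            boolean changed = true;
            while (changed) { // iterate until no vertex changes, i.e. the workset is empty
                changed = false;
                for (long[] e : edges) {
                    long min = Math.min(component.get(e[0]), component.get(e[1]));
                    for (long v : e) {
                        // the ComponentIdFilter rule: update only on strictly smaller ids
                        if (component.get(v) > min) {
                            component.put(v, min);
                            changed = true;
                        }
                    }
                }
            }
            // components converge to: 1, 2, 12 -> 1 and 42, 63 -> 42
            System.out.println(component);
        }
    }
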
http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/EnumTriangles.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/EnumTriangles.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/EnumTriangles.java
index 5fbb321..2c553e4 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/EnumTriangles.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/EnumTriangles.java
@@ -39,17 +39,15 @@ import java.util.List;
 /**
  * Triangle enumeration is a pre-processing step to find closely connected parts in graphs.
  * A triangle consists of three edges that connect three vertices with each other.
- * 
- * <p>
- * The algorithm works as follows: 
- * It groups all edges that share a common vertex and builds triads, i.e., triples of vertices 
- * that are connected by two edges. Finally, all triads are filtered for which no third edge exists 
+ *
+ * <p>The algorithm works as follows:
+ * It groups all edges that share a common vertex and builds triads, i.e., triples of vertices
+ * that are connected by two edges. Finally, triads for which no third edge exists
+ * that closes the triangle are filtered out.
- *  
- * <p>
- * Input files are plain text files and must be formatted as follows:
+ *
+ * <p>Input files are plain text files and must be formatted as follows:
  * <ul>
- * <li>Edges are represented as pairs for vertex IDs which are separated by space 
+ * <li>Edges are represented as pairs of vertex IDs which are separated by space
  * characters. Edges are separated by new-line characters.<br>
  * For example <code>"1 2\n2 12\n1 12\n42 63"</code> gives four (undirected) edges (1)-(2), (2)-(12), (1)-(12), and (42)-(63)
  * that include a triangle
@@ -59,17 +57,15 @@ import java.util.List;
  *     /  \
  *   (2)-(12)
  * </pre>
- * 
- * Usage: <code>EnumTriangleBasic --edges &lt;path&gt; --output &lt;path&gt;</code><br>
- * If no parameters are provided, the program is run with default data from {@link EnumTrianglesData}. 
- * 
- * <p>
- * This example shows how to use:
+ *
+ * <p>Usage: <code>EnumTriangleBasic --edges &lt;path&gt; --output &lt;path&gt;</code><br>
+ * If no parameters are provided, the program is run with default data from {@link EnumTrianglesData}.
+ *
+ * <p>This example shows how to use:
  * <ul>
  * <li>Custom Java objects which extend Tuple
  * <li>Group Sorting
  * </ul>
- * 
  */
 @SuppressWarnings("serial")
 public class EnumTriangles {
@@ -77,7 +73,7 @@ public class EnumTriangles {
 	// *************************************************************************
 	//     PROGRAM
 	// *************************************************************************
-	
+
 	public static void main(String[] args) throws Exception {
 
 		// Checking input parameters
@@ -88,7 +84,7 @@ public class EnumTriangles {
 
 		// make parameters available in the web interface
 		env.getConfig().setGlobalJobParameters(params);
-	
+
 		// read input data
 		DataSet<Edge> edges;
 		if (params.has("edges")) {
@@ -106,7 +102,7 @@ public class EnumTriangles {
 		// project edges by vertex id
 		DataSet<Edge> edgesById = edges
 				.map(new EdgeByIdProjector());
-		
+
 		DataSet<Triad> triangles = edgesById
 				// build triads
 				.groupBy(Edge.V1).sortGroup(Edge.V2, Order.ASCENDING).reduceGroup(new TriadBuilder())
@@ -128,60 +124,60 @@ public class EnumTriangles {
 	//     USER FUNCTIONS
 	// *************************************************************************
 
-	/** Converts a Tuple2 into an Edge */
+	/** Converts a Tuple2 into an Edge. */
 	@ForwardedFields("0;1")
 	public static class TupleEdgeConverter implements MapFunction<Tuple2<Integer, Integer>, Edge> {
 		private final Edge outEdge = new Edge();
-		
+
 		@Override
 		public Edge map(Tuple2<Integer, Integer> t) throws Exception {
 			outEdge.copyVerticesFromTuple2(t);
 			return outEdge;
 		}
 	}
-	
+
 	/** Projects an edge (pair of vertices) such that the id of the first is smaller than the id of the second. */
 	private static class EdgeByIdProjector implements MapFunction<Edge, Edge> {
-	
+
 		@Override
 		public Edge map(Edge inEdge) throws Exception {
-			
+
 			// flip vertices if necessary
-			if(inEdge.getFirstVertex() > inEdge.getSecondVertex()) {
+			if (inEdge.getFirstVertex() > inEdge.getSecondVertex()) {
 				inEdge.flipVertices();
 			}
-			
+
 			return inEdge;
 		}
 	}
-	
+
 	/**
 	 *  Builds triads (triples of vertices) from pairs of edges that share a vertex.
-	 *  The first vertex of a triad is the shared vertex, the second and third vertex are ordered by vertexId. 
+	 *  The first vertex of a triad is the shared vertex, the second and third vertex are ordered by vertexId.
 	 *  Assumes that input edges share the first vertex and are in ascending order of the second vertex.
 	 */
 	@ForwardedFields("0")
 	private static class TriadBuilder implements GroupReduceFunction<Edge, Triad> {
 		private final List<Integer> vertices = new ArrayList<Integer>();
 		private final Triad outTriad = new Triad();
-		
+
 		@Override
 		public void reduce(Iterable<Edge> edgesIter, Collector<Triad> out) throws Exception {
-			
+
 			final Iterator<Edge> edges = edgesIter.iterator();
-			
+
 			// clear vertex list
 			vertices.clear();
-			
+
 			// read first edge
 			Edge firstEdge = edges.next();
 			outTriad.setFirstVertex(firstEdge.getFirstVertex());
 			vertices.add(firstEdge.getSecondVertex());
-			
+
 			// build and emit triads
 			while (edges.hasNext()) {
 				Integer higherVertexId = edges.next().getSecondVertex();
-				
+
 				// combine vertex with all previously read vertices
 				for (Integer lowerVertexId : vertices) {
 					outTriad.setSecondVertex(lowerVertexId);
@@ -192,14 +188,14 @@ public class EnumTriangles {
 			}
 		}
 	}
-	
+
 	/** Filters triads (three vertices connected by two edges) without a closing third edge. */
 	private static class TriadFilter implements JoinFunction<Triad, Edge, Triad> {
-		
+
 		@Override
 		public Triad join(Triad triad, Edge edge) throws Exception {
 			return triad;
 		}
 	}
-	
+
 }

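The triad-building step that the TriadBuilder above performs on each sorted neighbor group can be illustrated outside of Flink. A hypothetical sketch: given one vertex's neighbors in ascending order, every pair of neighbors forms a triad whose closing edge the subsequent join (TriadFilter) must confirm.

    import java.util.ArrayList;
    import java.util.List;

    public class TriadSketch {

        public static void main(String[] args) {
            // edges grouped on vertex 1, second vertices in the ascending order
            // that sortGroup guarantees
            int sharedVertex = 1;
            int[] sortedNeighbors = {2, 12};

            List<int[]> triads = new ArrayList<>();
            List<Integer> seen = new ArrayList<>();
            for (int higher : sortedNeighbors) {
                // combine the new vertex with all previously seen (smaller) vertices
                for (int lower : seen) {
                    triads.add(new int[] {sharedVertex, lower, higher});
                }
                seen.add(higher);
            }

            // yields the single triad (1, 2, 12); the edge (2)-(12) exists in the
            // sample graph, so the join would confirm it as a triangle
            for (int[] t : triads) {
                System.out.println(t[0] + " " + t[1] + " " + t[2]);
            }
        }
    }
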
http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/PageRank.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/PageRank.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/PageRank.java
index 33305af..f22f2e6 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/PageRank.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/PageRank.java
@@ -38,32 +38,28 @@ import static org.apache.flink.api.java.aggregation.Aggregations.SUM;
 
 /**
  * A basic implementation of the Page Rank algorithm using a bulk iteration.
- * 
- * <p>
- * This implementation requires a set of pages and a set of directed links as input and works as follows. <br> 
+ *
+ * <p>This implementation requires a set of pages and a set of directed links as input and works as follows. <br>
  * In each iteration, the rank of every page is evenly distributed to all pages it points to.
  * Each page collects the partial ranks of all pages that point to it, sums them up, and applies a dampening factor to the sum.
  * The result is the new rank of the page. A new iteration is started with the new ranks of all pages.
  * This implementation terminates after a fixed number of iterations.<br>
- * This is the Wikipedia entry for the <a href="http://en.wikipedia.org/wiki/Page_rank">Page Rank algorithm</a>. 
- * 
- * <p>
- * Input files are plain text files and must be formatted as follows:
+ * This is the Wikipedia entry for the <a href="http://en.wikipedia.org/wiki/Page_rank">Page Rank algorithm</a>.
+ *
+ * <p>Input files are plain text files and must be formatted as follows:
  * <ul>
- * <li>Pages represented as an (long) ID separated by new-line characters.<br> 
+ * <li>Pages are represented as a (long) ID separated by new-line characters.<br>
  * For example <code>"1\n2\n12\n42\n63"</code> gives five pages with IDs 1, 2, 12, 42, and 63.
- * <li>Links are represented as pairs of page IDs which are separated by space 
+ * <li>Links are represented as pairs of page IDs which are separated by space
  * characters. Links are separated by new-line characters.<br>
  * For example <code>"1 2\n2 12\n1 12\n42 63"</code> gives four (directed) links (1)-&gt;(2), (2)-&gt;(12), (1)-&gt;(12), and (42)-&gt;(63).<br>
  * For this simple implementation it is required that each page has at least one incoming and one outgoing link (a page can point to itself).
  * </ul>
- * 
- * <p>
- * Usage: <code>PageRankBasic --pages &lt;path&gt; --links &lt;path&gt; --output &lt;path&gt; --numPages &lt;n&gt; --iterations &lt;n&gt;</code><br>
+ *
+ * <p>Usage: <code>PageRankBasic --pages &lt;path&gt; --links &lt;path&gt; --output &lt;path&gt; --numPages &lt;n&gt; --iterations &lt;n&gt;</code><br>
  * If no parameters are provided, the program is run with default data from {@link org.apache.flink.examples.java.graph.util.PageRankData} and 10 iterations.
- * 
- * <p>
- * This example shows how to use:
+ *
+ * <p>This example shows how to use:
  * <ul>
  * <li>Bulk Iterations
  * <li>Default Join
@@ -72,42 +68,42 @@ import static org.apache.flink.api.java.aggregation.Aggregations.SUM;
  */
 @SuppressWarnings("serial")
 public class PageRank {
-	
+
 	private static final double DAMPENING_FACTOR = 0.85;
 	private static final double EPSILON = 0.0001;
-	
+
 	// *************************************************************************
 	//     PROGRAM
 	// *************************************************************************
-	
+
 	public static void main(String[] args) throws Exception {
 
 		ParameterTool params = ParameterTool.fromArgs(args);
 
 		final int numPages = params.getInt("numPages", PageRankData.getNumberOfPages());
 		final int maxIterations = params.getInt("iterations", 10);
-		
+
 		// set up execution environment
 		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
 
 		// make the parameters available to the web ui
 		env.getConfig().setGlobalJobParameters(params);
-		
+
 		// get input data
 		DataSet<Long> pagesInput = getPagesDataSet(env, params);
 		DataSet<Tuple2<Long, Long>> linksInput = getLinksDataSet(env, params);
-		
+
 		// assign initial rank to pages
 		DataSet<Tuple2<Long, Double>> pagesWithRanks = pagesInput.
 				map(new RankAssigner((1.0d / numPages)));
-		
+
 		// build adjacency list from link input
-		DataSet<Tuple2<Long, Long[]>> adjacencyListInput = 
+		DataSet<Tuple2<Long, Long[]>> adjacencyListInput =
 				linksInput.groupBy(0).reduceGroup(new BuildOutgoingEdgeList());
-		
+
 		// set iterative data set
 		IterativeDataSet<Tuple2<Long, Double>> iteration = pagesWithRanks.iterate(maxIterations);
-		
+
 		DataSet<Tuple2<Long, Double>> newRanks = iteration
 				// join pages with outgoing edges and distribute rank
 				.join(adjacencyListInput).where(0).equalTo(0).flatMap(new JoinVertexWithEdgesMatch())
@@ -115,9 +111,9 @@ public class PageRank {
 				.groupBy(0).aggregate(SUM, 1)
 				// apply dampening factor
 				.map(new Dampener(DAMPENING_FACTOR, numPages));
-		
+
 		DataSet<Tuple2<Long, Double>> finalPageRanks = iteration.closeWith(
-				newRanks, 
+				newRanks,
 				newRanks.join(iteration).where(0).equalTo(0)
 				// termination condition
 				.filter(new EpsilonFilter()));
@@ -131,45 +127,43 @@ public class PageRank {
 			System.out.println("Printing result to stdout. Use --output to specify output path.");
 			finalPageRanks.print();
 		}
-
-		
 	}
 
 	// *************************************************************************
 	//     USER FUNCTIONS
 	// *************************************************************************
 
-	/** 
-	 * A map function that assigns an initial rank to all pages. 
+	/**
+	 * A map function that assigns an initial rank to all pages.
 	 */
 	public static final class RankAssigner implements MapFunction<Long, Tuple2<Long, Double>> {
 		Tuple2<Long, Double> outPageWithRank;
-		
+
 		public RankAssigner(double rank) {
 			this.outPageWithRank = new Tuple2<Long, Double>(-1L, rank);
 		}
-		
+
 		@Override
 		public Tuple2<Long, Double> map(Long page) {
 			outPageWithRank.f0 = page;
 			return outPageWithRank;
 		}
 	}
-	
+
 	/**
 	 * A reduce function that takes a sequence of edges and builds the adjacency list for the vertex where the edges
 	 * originate. Run as a pre-processing step.
 	 */
 	@ForwardedFields("0")
 	public static final class BuildOutgoingEdgeList implements GroupReduceFunction<Tuple2<Long, Long>, Tuple2<Long, Long[]>> {
-		
+
 		private final ArrayList<Long> neighbors = new ArrayList<Long>();
-		
+
 		@Override
 		public void reduce(Iterable<Tuple2<Long, Long>> values, Collector<Tuple2<Long, Long[]>> out) {
 			neighbors.clear();
 			Long id = 0L;
-			
+
 			for (Tuple2<Long, Long> n : values) {
 				id = n.f0;
 				neighbors.add(n.f1);
@@ -177,7 +171,7 @@ public class PageRank {
 			out.collect(new Tuple2<Long, Long[]>(id, neighbors.toArray(new Long[neighbors.size()])));
 		}
 	}
-	
+
 	/**
 	 * Join function that distributes a fraction of a vertex's rank to all neighbors.
 	 */
@@ -194,16 +188,16 @@ public class PageRank {
 			}
 		}
 	}
-	
+
 	/**
-	 * The function that applies the page rank dampening formula
+	 * The function that applies the page rank dampening formula.
 	 */
 	@ForwardedFields("0")
-	public static final class Dampener implements MapFunction<Tuple2<Long,Double>, Tuple2<Long,Double>> {
+	public static final class Dampener implements MapFunction<Tuple2<Long, Double>, Tuple2<Long, Double>> {
 
 		private final double dampening;
 		private final double randomJump;
-		
+
 		public Dampener(double dampening, double numVertices) {
 			this.dampening = dampening;
 			this.randomJump = (1 - dampening) / numVertices;
@@ -215,7 +209,7 @@ public class PageRank {
 			return value;
 		}
 	}
-	
+
 	/**
 	 * Filter that filters vertices where the rank difference is below a threshold.
 	 */
@@ -226,11 +220,11 @@ public class PageRank {
 			return Math.abs(value.f0.f1 - value.f1.f1) > EPSILON;
 		}
 	}
-	
+
 	// *************************************************************************
 	//     UTIL METHODS
 	// *************************************************************************
-	
+
 	private static DataSet<Long> getPagesDataSet(ExecutionEnvironment env, ParameterTool params) {
 		if (params.has("pages")) {
 			return env.readCsvFile(params.get("pages"))

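The dampening formula applied by the Dampener function above is simple enough to check by hand. A hypothetical arithmetic sketch using the example's DAMPENING_FACTOR of 0.85 and five pages; note that a uniform rank of 1/numPages is a fixed point of the formula:

    public class DampenerSketch {

        public static void main(String[] args) {
            double dampening = 0.85; // DAMPENING_FACTOR in the example
            double numPages = 5.0;
            double randomJump = (1 - dampening) / numPages; // as in the Dampener constructor

            // rank collected for one page after the aggregate(SUM, 1) step
            double summedPartialRanks = 0.2;
            double newRank = summedPartialRanks * dampening + randomJump;

            // prints ~0.2: 0.2 * 0.85 + 0.03, so the uniform rank is unchanged
            System.out.println(newRank);
        }
    }
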
http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/TransitiveClosureNaive.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/TransitiveClosureNaive.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/TransitiveClosureNaive.java
index 50e86ec..2857a0c 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/TransitiveClosureNaive.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/TransitiveClosureNaive.java
@@ -32,6 +32,15 @@ import org.apache.flink.util.Collector;
 import java.util.HashSet;
 import java.util.Set;
 
+/**
+ * The transitive closure of a graph contains an edge for each pair of vertices
+ * which are endpoints of at least one path in the graph.
+ *
+ * <p>This algorithm is implemented using a delta iteration. The transitive
+ * closure solution set is grown in each step by joining the workset of newly
+ * discovered path endpoints with the original graph edges and discarding
+ * previously discovered path endpoints (already in the solution set).
+ */
 @SuppressWarnings("serial")
 public class TransitiveClosureNaive {
 
@@ -57,9 +66,9 @@ public class TransitiveClosureNaive {
 			edges = ConnectedComponentsData.getDefaultEdgeDataSet(env);
 		}
 
-		IterativeDataSet<Tuple2<Long,Long>> paths = edges.iterate(maxIterations);
+		IterativeDataSet<Tuple2<Long, Long>> paths = edges.iterate(maxIterations);
 
-		DataSet<Tuple2<Long,Long>> nextPaths = paths
+		DataSet<Tuple2<Long, Long>> nextPaths = paths
 				.join(edges)
 				.where(1)
 				.equalTo(0)
@@ -83,17 +92,17 @@ public class TransitiveClosureNaive {
 					}
 				}).withForwardedFields("0;1");
 
-		DataSet<Tuple2<Long,Long>> newPaths = paths
+		DataSet<Tuple2<Long, Long>> newPaths = paths
 				.coGroup(nextPaths)
 				.where(0).equalTo(0)
 				.with(new CoGroupFunction<Tuple2<Long, Long>, Tuple2<Long, Long>, Tuple2<Long, Long>>() {
-					Set<Tuple2<Long,Long>> prevSet = new HashSet<Tuple2<Long,Long>>();
+					Set<Tuple2<Long, Long>> prevSet = new HashSet<Tuple2<Long, Long>>();
 					@Override
 					public void coGroup(Iterable<Tuple2<Long, Long>> prevPaths, Iterable<Tuple2<Long, Long>> nextPaths, Collector<Tuple2<Long, Long>> out) throws Exception {
-						for (Tuple2<Long,Long> prev : prevPaths) {
+						for (Tuple2<Long, Long> prev : prevPaths) {
 							prevSet.add(prev);
 						}
-						for (Tuple2<Long,Long> next: nextPaths) {
+						for (Tuple2<Long, Long> next : nextPaths) {
 							if (!prevSet.contains(next)) {
 								out.collect(next);
 							}
@@ -103,7 +112,6 @@ public class TransitiveClosureNaive {
 
 		DataSet<Tuple2<Long, Long>> transitiveClosure = paths.closeWith(nextPaths, newPaths);
 
-
 		// emit result
 		if (params.has("output")) {
 			transitiveClosure.writeAsCsv(params.get("output"), "\n", " ");
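
The iterate/closeWith pair above is Flink's bulk-iteration API: closeWith(result, terminationCriterion) feeds the result back into the next superstep and stops early once the criterion data set (newPaths here) becomes empty. A minimal sketch of the same control flow on toy data, assuming a Flink batch dependency on the classpath (not part of this commit):

	import org.apache.flink.api.common.functions.MapFunction;
	import org.apache.flink.api.java.DataSet;
	import org.apache.flink.api.java.ExecutionEnvironment;
	import org.apache.flink.api.java.operators.IterativeDataSet;

	public class IterateSketch {
		public static void main(String[] args) throws Exception {
			ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

			// start an iteration with at most 10 supersteps
			IterativeDataSet<Long> iteration = env.generateSequence(1, 5).iterate(10);

			// each superstep doubles every element
			DataSet<Long> doubled = iteration.map(new MapFunction<Long, Long>() {
				@Override
				public Long map(Long value) {
					return value * 2;
				}
			});

			// feed the result back in; without a termination criterion
			// the loop runs for all 10 supersteps
			iteration.closeWith(doubled).print();
		}
	}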

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/util/ConnectedComponentsData.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/util/ConnectedComponentsData.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/util/ConnectedComponentsData.java
index dd1f596..6fb39d8 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/util/ConnectedComponentsData.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/util/ConnectedComponentsData.java
@@ -18,12 +18,12 @@
 
 package org.apache.flink.examples.java.graph.util;
 
-import java.util.LinkedList;
-import java.util.List;
-
-import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.ExecutionEnvironment;
+import org.apache.flink.api.java.tuple.Tuple2;
+
+import java.util.LinkedList;
+import java.util.List;
 
 /**
  * Provides the default data sets used for the Connected Components example program.
@@ -31,7 +31,7 @@ import org.apache.flink.api.java.ExecutionEnvironment;
  *
  */
 public class ConnectedComponentsData {
-	
+
 	public static final long[] VERTICES  = new long[] {
 			1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
 
@@ -42,7 +42,7 @@ public class ConnectedComponentsData {
 		}
 		return env.fromCollection(verticesList);
 	}
-	
+
 	public static final Object[][] EDGES = new Object[][] {
 		new Object[]{1L, 2L},
 		new Object[]{2L, 3L},
@@ -59,14 +59,14 @@ public class ConnectedComponentsData {
 		new Object[]{1L, 15L},
 		new Object[]{16L, 1L}
 	};
-	
+
 	public static DataSet<Tuple2<Long, Long>> getDefaultEdgeDataSet(ExecutionEnvironment env) {
-		
+
 		List<Tuple2<Long, Long>> edgeList = new LinkedList<Tuple2<Long, Long>>();
 		for (Object[] edge : EDGES) {
 			edgeList.add(new Tuple2<Long, Long>((Long) edge[0], (Long) edge[1]));
 		}
 		return env.fromCollection(edgeList);
 	}
-	
+
 }
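
The getDefault*DataSet methods above all follow the same pattern: build a java.util.List in memory and hand it to env.fromCollection, which infers the element type from the collection. A minimal sketch of that pattern with hypothetical data (assumes a Flink batch dependency):

	import org.apache.flink.api.java.DataSet;
	import org.apache.flink.api.java.ExecutionEnvironment;
	import org.apache.flink.api.java.tuple.Tuple2;

	import java.util.ArrayList;
	import java.util.List;

	public class FromCollectionSketch {
		public static void main(String[] args) throws Exception {
			ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

			// build a small edge list in memory
			List<Tuple2<Long, Long>> edges = new ArrayList<Tuple2<Long, Long>>();
			edges.add(new Tuple2<Long, Long>(1L, 2L));
			edges.add(new Tuple2<Long, Long>(2L, 3L));

			// fromCollection turns the local list into a DataSet
			DataSet<Tuple2<Long, Long>> edgeSet = env.fromCollection(edges);
			edgeSet.print();
		}
	}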

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/util/EnumTrianglesData.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/util/EnumTrianglesData.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/util/EnumTrianglesData.java
index a54b3da..cc3c3ac 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/util/EnumTrianglesData.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/util/EnumTrianglesData.java
@@ -16,16 +16,15 @@
  * limitations under the License.
  */
 
-
 package org.apache.flink.examples.java.graph.util;
 
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.examples.java.graph.util.EnumTrianglesDataTypes.Edge;
 
+import java.util.ArrayList;
+import java.util.List;
+
 /**
  * Provides the default data sets used for the Triangle Enumeration example programs.
  * The default data sets are used, if no parameters are given to the program.
@@ -36,7 +35,7 @@ public class EnumTrianglesData {
 	public static final Object[][] EDGES = {
 		{1, 2},
 		{1, 3},
-		{1 ,4},
+		{1, 4},
 		{1, 5},
 		{2, 3},
 		{2, 5},
@@ -46,14 +45,14 @@ public class EnumTrianglesData {
 		{5, 6},
 		{7, 8}
 	};
-	
+
 	public static DataSet<EnumTrianglesDataTypes.Edge> getDefaultEdgeDataSet(ExecutionEnvironment env) {
-		
+
 		List<EnumTrianglesDataTypes.Edge> edges = new ArrayList<EnumTrianglesDataTypes.Edge>();
-		for(Object[] e : EDGES) {
-			edges.add(new Edge((Integer)e[0], (Integer)e[1]));
+		for (Object[] e : EDGES) {
+			edges.add(new Edge((Integer) e[0], (Integer) e[1]));
 		}
-		
+
 		return env.fromCollection(edges);
 	}
 }

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/util/EnumTrianglesDataTypes.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/util/EnumTrianglesDataTypes.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/util/EnumTrianglesDataTypes.java
index 5c6e8b0..0aa0ab0 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/util/EnumTrianglesDataTypes.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/util/EnumTrianglesDataTypes.java
@@ -22,62 +22,88 @@ import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.api.java.tuple.Tuple3;
 import org.apache.flink.api.java.tuple.Tuple4;
 
+/**
+ * The data classes for EnumTriangles.
+ */
 public class EnumTrianglesDataTypes {
 
+	/**
+	 * A POJO storing two vertex IDs.
+	 */
 	public static class Edge extends Tuple2<Integer, Integer> {
 		private static final long serialVersionUID = 1L;
-		
+
 		public static final int V1 = 0;
 		public static final int V2 = 1;
-		
+
 		public Edge() {}
-		
+
 		public Edge(final Integer v1, final Integer v2) {
 			this.setFirstVertex(v1);
 			this.setSecondVertex(v2);
 		}
-		
-		public Integer getFirstVertex() { return this.getField(V1); }
-		
-		public Integer getSecondVertex() { return this.getField(V2); }
-		
-		public void setFirstVertex(final Integer vertex1) { this.setField(vertex1, V1); }
-		
-		public void setSecondVertex(final Integer vertex2) { this.setField(vertex2, V2); }
-		
+
+		public Integer getFirstVertex() {
+			return this.getField(V1);
+		}
+
+		public Integer getSecondVertex() {
+			return this.getField(V2);
+		}
+
+		public void setFirstVertex(final Integer vertex1) {
+			this.setField(vertex1, V1);
+		}
+
+		public void setSecondVertex(final Integer vertex2) {
+			this.setField(vertex2, V2);
+		}
+
 		public void copyVerticesFromTuple2(Tuple2<Integer, Integer> t) {
 			this.setFirstVertex(t.f0);
 			this.setSecondVertex(t.f1);
 		}
-		
+
 		public void copyVerticesFromEdgeWithDegrees(EdgeWithDegrees ewd) {
 			this.setFirstVertex(ewd.getFirstVertex());
 			this.setSecondVertex(ewd.getSecondVertex());
 		}
-		
+
 		public void flipVertices() {
 			Integer tmp = this.getFirstVertex();
 			this.setFirstVertex(this.getSecondVertex());
 			this.setSecondVertex(tmp);
 		}
 	}
-	
+
+	/**
+	 * A POJO storing three vertex IDs.
+	 */
 	public static class Triad extends Tuple3<Integer, Integer, Integer> {
 		private static final long serialVersionUID = 1L;
-		
+
 		public static final int V1 = 0;
 		public static final int V2 = 1;
 		public static final int V3 = 2;
-		
+
 		public Triad() {}
-		
-		public void setFirstVertex(final Integer vertex1) { this.setField(vertex1, V1); }
-		
-		public void setSecondVertex(final Integer vertex2) { this.setField(vertex2, V2); }
-		
-		public void setThirdVertex(final Integer vertex3) { this.setField(vertex3, V3); }
+
+		public void setFirstVertex(final Integer vertex1) {
+			this.setField(vertex1, V1);
+		}
+
+		public void setSecondVertex(final Integer vertex2) {
+			this.setField(vertex2, V2);
+		}
+
+		public void setThirdVertex(final Integer vertex3) {
+			this.setField(vertex3, V3);
+		}
 	}
-	
+
+	/**
+	 * A POJO storing two vertex IDs with degree.
+	 */
 	public static class EdgeWithDegrees extends Tuple4<Integer, Integer, Integer, Integer> {
 		private static final long serialVersionUID = 1L;
 
@@ -85,25 +111,41 @@ public class EnumTrianglesDataTypes {
 		public static final int V2 = 1;
 		public static final int D1 = 2;
 		public static final int D2 = 3;
-		
+
 		public EdgeWithDegrees() { }
-			
-		public Integer getFirstVertex() { return this.getField(V1); }
-		
-		public Integer getSecondVertex() { return this.getField(V2); }
-		
-		public Integer getFirstDegree() { return this.getField(D1); }
-		
-		public Integer getSecondDegree() { return this.getField(D2); }
-		
-		public void setFirstVertex(final Integer vertex1) { this.setField(vertex1, V1); }
-		
-		public void setSecondVertex(final Integer vertex2) { this.setField(vertex2, V2); }
-		
-		public void setFirstDegree(final Integer degree1) { this.setField(degree1, D1); }
-		
-		public void setSecondDegree(final Integer degree2) { this.setField(degree2, D2); }
-		
+
+		public Integer getFirstVertex() {
+			return this.getField(V1);
+		}
+
+		public Integer getSecondVertex() {
+			return this.getField(V2);
+		}
+
+		public Integer getFirstDegree() {
+			return this.getField(D1);
+		}
+
+		public Integer getSecondDegree() {
+			return this.getField(D2);
+		}
+
+		public void setFirstVertex(final Integer vertex1) {
+			this.setField(vertex1, V1);
+		}
+
+		public void setSecondVertex(final Integer vertex2) {
+			this.setField(vertex2, V2);
+		}
+
+		public void setFirstDegree(final Integer degree1) {
+			this.setField(degree1, D1);
+		}
+
+		public void setSecondDegree(final Integer degree2) {
+			this.setField(degree2, D2);
+		}
+
 		public void copyFrom(final EdgeWithDegrees edge) {
 			this.setFirstVertex(edge.getFirstVertex());
 			this.setSecondVertex(edge.getSecondVertex());
@@ -111,6 +153,4 @@ public class EnumTrianglesDataTypes {
 			this.setSecondDegree(edge.getSecondDegree());
 		}
 	}
-	
-	
 }
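
The pattern above gives the tuple-based data types named accessors: each class extends a TupleN type and delegates to getField/setField with the V1/V2/... position constants. A short usage sketch (hypothetical values, assumes the example classes on the classpath):

	import org.apache.flink.examples.java.graph.util.EnumTrianglesDataTypes.Edge;

	public class EdgeSketch {
		public static void main(String[] args) {
			Edge e = new Edge(1, 2);

			// named accessors delegate to Tuple2.getField(V1) / getField(V2)
			System.out.println(e.getFirstVertex() + " -> " + e.getSecondVertex());

			// flipVertices() swaps the two tuple fields in place
			e.flipVertices();
			System.out.println(e);  // prints the underlying tuple: (2,1)
		}
	}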

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/util/PageRankData.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/util/PageRankData.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/util/PageRankData.java
index f2d9078..1c0bde7 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/util/PageRankData.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/graph/util/PageRankData.java
@@ -18,13 +18,13 @@
 
 package org.apache.flink.examples.java.graph.util;
 
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.api.java.tuple.Tuple2;
 
+import java.util.ArrayList;
+import java.util.List;
+
 /**
  * Provides the default data sets used for the PageRank example program.
  * The default data sets are used, if no parameters are given to the program.
@@ -63,24 +63,24 @@ public class PageRankData {
 		{14L, 12L},
 		{15L, 1L},
 	};
-	
+
 	private static int numPages = 15;
-	
+
 	public static DataSet<Tuple2<Long, Long>> getDefaultEdgeDataSet(ExecutionEnvironment env) {
-		
+
 		List<Tuple2<Long, Long>> edges = new ArrayList<Tuple2<Long, Long>>();
-		for(Object[] e : EDGES) {
-			edges.add(new Tuple2<Long, Long>((Long)e[0], (Long)e[1]));
+		for (Object[] e : EDGES) {
+			edges.add(new Tuple2<Long, Long>((Long) e[0], (Long) e[1]));
 		}
 		return env.fromCollection(edges);
 	}
-	
+
 	public static DataSet<Long> getDefaultPagesDataSet(ExecutionEnvironment env) {
 		return env.generateSequence(1, 15);
 	}
-	
+
 	public static int getNumberOfPages() {
 		return numPages;
 	}
-	
+
 }

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/misc/CollectionExecutionExample.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/misc/CollectionExecutionExample.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/misc/CollectionExecutionExample.java
index 44b566b..79ac9ec 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/misc/CollectionExecutionExample.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/misc/CollectionExecutionExample.java
@@ -18,39 +18,41 @@
 
 package org.apache.flink.examples.java.misc;
 
-import java.util.List;
-
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.api.java.tuple.Tuple2;
 
-/** 
+import java.util.List;
+
+/**
  * This example shows how to use the collection based execution of Flink.
- * 
- * The collection based execution is a local mode that is not using the full Flink runtime.
+ *
+ * <p>The collection-based execution is a local mode that does not use the full Flink runtime.
  * DataSet transformations are executed on Java collections.
- * 
- * See the "Local Execution" section in the documentation for more details: 
+ *
+ * <p>See the "Local Execution" section in the documentation for more details:
  * 	http://flink.apache.org/docs/latest/apis/local_execution.html
- * 
  */
 public class CollectionExecutionExample {
-	
+
 	/**
-	 * POJO class representing a user
+	 * POJO class representing a user.
 	 */
 	public static class User {
 		public int userIdentifier;
 		public String name;
+
 		public User() {}
+
 		public User(int userIdentifier, String name) {
 			this.userIdentifier = userIdentifier; this.name = name;
 		}
+
 		public String toString() {
-			return "User{userIdentifier="+userIdentifier+" name="+name+"}";
+			return "User{userIdentifier=" + userIdentifier + " name=" + name + "}";
 		}
 	}
-	
+
 	/**
 	 * POJO for an EMail.
 	 */
@@ -58,36 +60,40 @@ public class CollectionExecutionExample {
 		public int userId;
 		public String subject;
 		public String body;
+
 		public EMail() {}
+
 		public EMail(int userId, String subject, String body) {
 			this.userId = userId; this.subject = subject; this.body = body;
 		}
+
 		public String toString() {
-			return "eMail{userId="+userId+" subject="+subject+" body="+body+"}";
+			return "eMail{userId=" + userId + " subject=" + subject + " body=" + body + "}";
 		}
-		
+
 	}
+
 	public static void main(String[] args) throws Exception {
 		// initialize a new Collection-based execution environment
 		final ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();
-		
+
 		// create objects for users and emails
 		User[] usersArray = { new User(1, "Peter"), new User(2, "John"), new User(3, "Bill") };
-		
+
 		EMail[] emailsArray = {new EMail(1, "Re: Meeting", "How about 1pm?"),
 							new EMail(1, "Re: Meeting", "Sorry, I'm not availble"),
 							new EMail(3, "Re: Re: Project proposal", "Give me a few more days to think about it.")};
-		
+
 		// convert objects into a DataSet
 		DataSet<User> users = env.fromElements(usersArray);
 		DataSet<EMail> emails = env.fromElements(emailsArray);
-		
+
 		// join the two DataSets
-		DataSet<Tuple2<User,EMail>> joined = users.join(emails).where("userIdentifier").equalTo("userId");
-		
+		DataSet<Tuple2<User, EMail>> joined = users.join(emails).where("userIdentifier").equalTo("userId");
+
 		// retrieve the resulting Tuple2 elements into an ArrayList.
-		List<Tuple2<User,EMail>> result = joined.collect();
-		
+		List<Tuple2<User, EMail>> result = joined.collect();
+
 		// Do some work with the resulting ArrayList (=Collection).
 		for (Tuple2<User, EMail> t : result) {
 			System.err.println("Result = " + t);

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/misc/PiEstimation.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/misc/PiEstimation.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/misc/PiEstimation.java
index fc85110..f33d095 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/misc/PiEstimation.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/misc/PiEstimation.java
@@ -23,39 +23,38 @@ import org.apache.flink.api.common.functions.ReduceFunction;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.ExecutionEnvironment;
 
-/** 
+/**
  * Estimates the value of Pi using the Monte Carlo method.
- * The area of a circle is Pi * R^2, R being the radius of the circle 
+ * The area of a circle is Pi * R^2, R being the radius of the circle.
  * The area of a square is 4 * R^2, where the length of the square's edge is 2*R.
- * 
- * Thus Pi = 4 * (area of circle / area of square).
- * 
- * The idea is to find a way to estimate the circle to square area ratio.
+ *
+ * <p>Thus Pi = 4 * (area of circle / area of square).
+ *
+ * <p>The idea is to find a way to estimate the circle to square area ratio.
  * The Monte Carlo method suggests collecting random points (within the square)
  * and then counting the number of points that fall within the circle
- * 
+ *
  * <pre>
  * {@code
  * x = Math.random()
  * y = Math.random()
- * 
+ *
  * x * x + y * y < 1
  * }
  * </pre>
  */
 @SuppressWarnings("serial")
 public class PiEstimation implements java.io.Serializable {
-	
-	
+
 	public static void main(String[] args) throws Exception {
 
 		final long numSamples = args.length > 0 ? Long.parseLong(args[0]) : 1000000;
-		
+
 		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
-		
+
 		// count how many of the samples would randomly fall into
 		// the unit circle
-		DataSet<Long> count = 
+		DataSet<Long> count =
 				env.generateSequence(1, numSamples)
 				.map(new Sampler())
 				.reduce(new SumReducer());
@@ -68,9 +67,9 @@ public class PiEstimation implements java.io.Serializable {
 	//*************************************************************************
 	//     USER FUNCTIONS
 	//*************************************************************************
-	
-	
-	/** 
+
+
+	/**
 	 * Sampler randomly emits points that fall within a square of edge x * y.
 	 * It calculates the distance to the center of a virtually centered circle of radius x = y = 1.
 	 * If the distance is less than 1, then and only then does it return a 1.
@@ -85,8 +84,8 @@ public class PiEstimation implements java.io.Serializable {
 		}
 	}
 
-	
-	/** 
+
+	/**
 	 * Simply sums up all long values.
 	 */
 	public static final class SumReducer implements ReduceFunction<Long>{
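
The Sampler/SumReducer pair sketched above implements the Monte Carlo estimate from the Javadoc: draw (x, y) uniformly at random, count the points with x * x + y * y < 1, and scale the hit ratio by 4. The same arithmetic in plain Java, without the Flink plumbing (hypothetical, not part of this commit):

	import java.util.Random;

	public class MonteCarloPiSketch {
		public static void main(String[] args) {
			final long numSamples = 1000000;
			final Random random = new Random();

			long count = 0;
			for (long i = 0; i < numSamples; i++) {
				double x = random.nextDouble();
				double y = random.nextDouble();
				// the point falls inside the quarter circle of radius 1
				if (x * x + y * y < 1) {
					count++;
				}
			}

			// Pi = 4 * (area of circle / area of square)
			System.out.println("Pi is roughly " + 4.0 * count / numSamples);
		}
	}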

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/ml/LinearRegression.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/ml/LinearRegression.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/ml/LinearRegression.java
index 90ad67a..7e9d41d 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/ml/LinearRegression.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/ml/LinearRegression.java
@@ -18,45 +18,41 @@
 
 package org.apache.flink.examples.java.ml;
 
-import java.io.Serializable;
-import java.util.Collection;
-
 import org.apache.flink.api.common.functions.MapFunction;
 import org.apache.flink.api.common.functions.ReduceFunction;
 import org.apache.flink.api.common.functions.RichMapFunction;
+import org.apache.flink.api.java.DataSet;
+import org.apache.flink.api.java.ExecutionEnvironment;
+import org.apache.flink.api.java.operators.IterativeDataSet;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.api.java.utils.ParameterTool;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.examples.java.ml.util.LinearRegressionData;
-import org.apache.flink.api.java.DataSet;
-import org.apache.flink.api.java.ExecutionEnvironment;
-import org.apache.flink.api.java.operators.IterativeDataSet;
+
+import java.io.Serializable;
+import java.util.Collection;
 
 /**
 * This example implements a basic Linear Regression to solve the y = theta0 + theta1*x problem using the batch gradient descent algorithm.
  *
- * <p>
- * Linear Regression with BGD(batch gradient descent) algorithm is an iterative clustering algorithm and works as follows:<br>
+ * <p>Linear Regression with BGD (batch gradient descent) is an iterative optimization algorithm and works as follows:<br>
 * Given a data set and a target set, BGD tries to find the best parameters for the data set to fit the target set.
 * In each iteration, the algorithm computes the gradient of the cost function and uses it to update all the parameters.
 * The algorithm terminates after a fixed number of iterations (as in this implementation).
 * With enough iterations, the algorithm can minimize the cost function and find the best parameters.
  * This is the Wikipedia entry for the <a href = "http://en.wikipedia.org/wiki/Linear_regression">Linear regression</a> and <a href = "http://en.wikipedia.org/wiki/Gradient_descent">Gradient descent algorithm</a>.
- * 
- * <p>
- * This implementation works on one-dimensional data. And find the two-dimensional theta.<br>
+ *
+ * <p>This implementation works on one-dimensional data and finds the two-dimensional theta.<br>
 * It finds the best theta parameters to fit the target.
- * 
- * <p>
- * Input files are plain text files and must be formatted as follows:
+ *
+ * <p>Input files are plain text files and must be formatted as follows:
  * <ul>
 * <li>Data points are represented as two double values separated by a blank character. The first one represents the X (the training data) and the second represents the Y (the target).
  * Data points are separated by newline characters.<br>
  * For example <code>"-0.02 -0.04\n5.3 10.6\n"</code> gives two data points (x=-0.02, y=-0.04) and (x=5.3, y=10.6).
  * </ul>
- * 
- * <p>
- * This example shows how to use:
+ *
+ * <p>This example shows how to use:
  * <ul>
  * <li> Bulk iterations
  * <li> Broadcast variables in bulk iterations
@@ -102,7 +98,7 @@ public class LinearRegression {
 		// set number of bulk iterations for SGD linear Regression
 		IterativeDataSet<Params> loop = parameters.iterate(iterations);
 
-		DataSet<Params> new_parameters = data
+		DataSet<Params> newParameters = data
 				// compute a single step using every sample
 				.map(new SubUpdate()).withBroadcastSet(loop, "parameters")
 				// sum up all the steps
@@ -111,10 +107,10 @@ public class LinearRegression {
 				.map(new Update());
 
 		// feed new parameters back into next iteration
-		DataSet<Params> result = loop.closeWith(new_parameters);
+		DataSet<Params> result = loop.closeWith(newParameters);
 
 		// emit result
-		if(params.has("output")) {
+		if (params.has("output")) {
 			result.writeAsText(params.get("output"));
 			// execute program
 			env.execute("Linear Regression example");
@@ -132,11 +128,11 @@ public class LinearRegression {
 	 * A simple data sample, x means the input, and y means the target.
 	 */
 	public static class Data implements Serializable{
-		public double x,y;
+		public double x, y;
 
-		public Data() {};
+		public Data() {}
 
-		public Data(double x ,double y){
+		public Data(double x, double y) {
 			this.x = x;
 			this.y = y;
 		}
@@ -153,11 +149,11 @@ public class LinearRegression {
 	 */
 	public static class Params implements Serializable{
 
-		private double theta0,theta1;
+		private double theta0, theta1;
 
-		public Params(){};
+		public Params() {}
 
-		public Params(double x0, double x1){
+		public Params(double x0, double x1) {
 			this.theta0 = x0;
 			this.theta1 = x1;
 		}
@@ -183,9 +179,9 @@ public class LinearRegression {
 			this.theta1 = theta1;
 		}
 
-		public Params div(Integer a){
-			this.theta0 = theta0 / a ;
-			this.theta1 = theta1 / a ;
+		public Params div(Integer a) {
+			this.theta0 = theta0 / a;
+			this.theta1 = theta1 / a;
 			return this;
 		}
 
@@ -198,9 +194,9 @@ public class LinearRegression {
 	/**
 	 * Computes a single BGD-type update for every parameter.
 	 */
-	public static class SubUpdate extends RichMapFunction<Data,Tuple2<Params,Integer>> {
+	public static class SubUpdate extends RichMapFunction<Data, Tuple2<Params, Integer>> {
 
-		private Collection<Params> parameters; 
+		private Collection<Params> parameters;
 
 		private Params parameter;
 
@@ -215,18 +211,18 @@ public class LinearRegression {
 		@Override
 		public Tuple2<Params, Integer> map(Data in) throws Exception {
 
-			for(Params p : parameters){
-				this.parameter = p; 
+			for (Params p : parameters) {
+				this.parameter = p;
 			}
 
-			double theta_0 = parameter.theta0 - 0.01*((parameter.theta0 + (parameter.theta1*in.x)) - in.y);
-			double theta_1 = parameter.theta1 - 0.01*(((parameter.theta0 + (parameter.theta1*in.x)) - in.y) * in.x);
+			double theta0 = parameter.theta0 - 0.01 * ((parameter.theta0 + (parameter.theta1 * in.x)) - in.y);
+			double theta1 = parameter.theta1 - 0.01 * (((parameter.theta0 + (parameter.theta1 * in.x)) - in.y) * in.x);
 
-			return new Tuple2<Params,Integer>(new Params(theta_0,theta_1),count);
+			return new Tuple2<Params, Integer>(new Params(theta0, theta1), count);
 		}
 	}
 
-	/**  
+	/**
 	 * Accumulates all the updates.
 	 */
 	public static class UpdateAccumulator implements ReduceFunction<Tuple2<Params, Integer>> {
@@ -234,10 +230,10 @@ public class LinearRegression {
 		@Override
 		public Tuple2<Params, Integer> reduce(Tuple2<Params, Integer> val1, Tuple2<Params, Integer> val2) {
 
-			double new_theta0 = val1.f0.theta0 + val2.f0.theta0;
-			double new_theta1 = val1.f0.theta1 + val2.f0.theta1;
-			Params result = new Params(new_theta0,new_theta1);
-			return new Tuple2<Params, Integer>( result, val1.f1 + val2.f1);
+			double newTheta0 = val1.f0.theta0 + val2.f0.theta0;
+			double newTheta1 = val1.f0.theta1 + val2.f0.theta1;
+			Params result = new Params(newTheta0, newTheta1);
+			return new Tuple2<Params, Integer>(result, val1.f1 + val2.f1);
 
 		}
 	}
@@ -245,7 +241,7 @@ public class LinearRegression {
 	/**
 	 * Computes the final update by averaging them.
 	 */
-	public static class Update implements MapFunction<Tuple2<Params, Integer>,Params> {
+	public static class Update implements MapFunction<Tuple2<Params, Integer>, Params> {
 
 		@Override
 		public Params map(Tuple2<Params, Integer> arg0) throws Exception {
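
The theta0/theta1 locals renamed above compute one gradient-descent step for the model y = theta0 + theta1 * x with a fixed learning rate of 0.01. The update rule in isolation, on one hypothetical data point (plain Java, not part of this commit):

	public class BgdStepSketch {
		public static void main(String[] args) {
			double theta0 = 0.0;
			double theta1 = 0.0;
			double learningRate = 0.01;  // fixed, as in SubUpdate

			// one data point (x, y); the generator produces roughly y = 2x
			double x = 5.3;
			double y = 10.6;

			// prediction error of the current parameters
			double error = (theta0 + theta1 * x) - y;

			// theta_j -= learningRate * gradient of the squared error w.r.t. theta_j
			theta0 = theta0 - learningRate * error;
			theta1 = theta1 - learningRate * error * x;

			System.out.println("theta0=" + theta0 + " theta1=" + theta1);
		}
	}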

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/ml/util/LinearRegressionData.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/ml/util/LinearRegressionData.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/ml/util/LinearRegressionData.java
index 838e320..3d0ad03 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/ml/util/LinearRegressionData.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/ml/util/LinearRegressionData.java
@@ -20,8 +20,8 @@ package org.apache.flink.examples.java.ml.util;
 
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.ExecutionEnvironment;
-import org.apache.flink.examples.java.ml.LinearRegression.Params;
 import org.apache.flink.examples.java.ml.LinearRegression.Data;
+import org.apache.flink.examples.java.ml.LinearRegression.Params;
 
 import java.util.LinkedList;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/ml/util/LinearRegressionDataGenerator.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/ml/util/LinearRegressionDataGenerator.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/ml/util/LinearRegressionDataGenerator.java
index a9f9e08..96ee56c 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/ml/util/LinearRegressionDataGenerator.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/ml/util/LinearRegressionDataGenerator.java
@@ -43,13 +43,13 @@ public class LinearRegressionDataGenerator {
 
 	/**
 	 * Main method to generate data for the {@link org.apache.flink.examples.java.ml.LinearRegression} example program.
-	 * <p>
-	 * The generator creates to files:
+	 *
+	 * <p>The generator creates the following file:
 	 * <ul>
 	 * <li><code>{tmp.dir}/data</code> for the data points
-	 * </ul> 
-	 * 
-	 * @param args 
+	 * </ul>
+	 *
+	 * @param args
 	 * <ol>
 	 * <li>Int: Number of data points
 	 * <li><b>Optional</b> Long: Random seed
@@ -72,15 +72,15 @@ public class LinearRegressionDataGenerator {
 		// write the points out
 		BufferedWriter pointsOut = null;
 		try {
-			pointsOut = new BufferedWriter(new FileWriter(new File(tmpDir+"/"+POINTS_FILE)));
+			pointsOut = new BufferedWriter(new FileWriter(new File(tmpDir + "/" + POINTS_FILE)));
 			StringBuilder buffer = new StringBuilder();
 
 			// DIMENSIONALITY + 1 accounts for the x values (dimensionality) plus the target y
-			double[] point = new double[DIMENSIONALITY+1];
+			double[] point = new double[DIMENSIONALITY + 1];
 
 			for (int i = 1; i <= numDataPoints; i++) {
 				point[0] = random.nextGaussian();
-				point[1] = 2 * point[0] + 0.01*random.nextGaussian();
+				point[1] = 2 * point[0] + 0.01 * random.nextGaussian();
 				writePoint(point, buffer, pointsOut);
 			}
 
@@ -91,17 +91,16 @@ public class LinearRegressionDataGenerator {
 			}
 		}
 
-		System.out.println("Wrote "+numDataPoints+" data points to "+tmpDir+"/"+POINTS_FILE);
+		System.out.println("Wrote " + numDataPoints + " data points to " + tmpDir + "/" + POINTS_FILE);
 	}
 
-
 	private static void writePoint(double[] data, StringBuilder buffer, BufferedWriter out) throws IOException {
 		buffer.setLength(0);
 
 		// write coordinates
 		for (int j = 0; j < data.length; j++) {
 			buffer.append(FORMAT.format(data[j]));
-			if(j < data.length - 1) {
+			if (j < data.length - 1) {
 				buffer.append(DELIMITER);
 			}
 		}

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/EmptyFieldsCountAccumulator.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/EmptyFieldsCountAccumulator.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/EmptyFieldsCountAccumulator.java
index 87b5bff..feec3ef 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/EmptyFieldsCountAccumulator.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/EmptyFieldsCountAccumulator.java
@@ -18,32 +18,32 @@
 
 package org.apache.flink.examples.java.relational;
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
-import org.apache.commons.lang3.StringUtils;
-
 import org.apache.flink.api.common.JobExecutionResult;
 import org.apache.flink.api.common.accumulators.Accumulator;
+import org.apache.flink.api.common.functions.RichFilterFunction;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.ExecutionEnvironment;
-import org.apache.flink.api.common.functions.RichFilterFunction;
 import org.apache.flink.api.java.tuple.Tuple3;
 import org.apache.flink.api.java.utils.ParameterTool;
 import org.apache.flink.configuration.Configuration;
 
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
 /**
  * This program filters lines from a CSV file with empty fields. In doing so, it counts the number of empty fields per
 * column within a CSV file using a custom accumulator for vectors. In this context, empty fields are those that at
  * most contain whitespace characters like space and tab.
- * <p>
- * The input file is a plain text CSV file with the semicolon as field separator and double quotes as field delimiters
+ *
+ * <p>The input file is a plain text CSV file with the semicolon as field separator and double quotes as field delimiters
  * and three columns. See {@link #getDataSet(ExecutionEnvironment, ParameterTool)} for configuration.
- * <p>
- * Usage: <code>EmptyFieldsCountAccumulator --input &lt;path&gt; --output &lt;path&gt;</code> <br>
- * <p>
- * This example shows how to use:
+ *
+ * <p>Usage: <code>EmptyFieldsCountAccumulator --input &lt;path&gt; --output &lt;path&gt;</code> <br>
+ *
+ * <p>This example shows how to use:
  * <ul>
  * <li>custom accumulators
  * <li>tuple data types
@@ -122,7 +122,7 @@ public class EmptyFieldsCountAccumulator {
 
 	/**
 	 * This function filters all incoming tuples that have one or more empty fields.
-	 * In doing so, it also counts the number of empty fields per attribute with an accumulator (registered under 
+	 * In doing so, it also counts the number of empty fields per attribute with an accumulator (registered under
 	 * {@link EmptyFieldsCountAccumulator#EMPTY_FIELD_ACCUMULATOR}).
 	 */
 	public static final class EmptyFieldFilter extends RichFilterFunction<StringTriple> {
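
The accumulator pattern advertised in the Javadoc above generally looks like this: register an Accumulator under a name in a rich function's open() method, update it per record, and read the result from the JobExecutionResult after env.execute(). A minimal sketch using Flink's built-in IntCounter (hypothetical names; the example's own accumulator is a custom vector type):

	import org.apache.flink.api.common.accumulators.IntCounter;
	import org.apache.flink.api.common.functions.RichFilterFunction;
	import org.apache.flink.configuration.Configuration;

	public class EmptyLineFilter extends RichFilterFunction<String> {

		private final IntCounter emptyLines = new IntCounter();

		@Override
		public void open(Configuration parameters) {
			// register under a name that the driver can query after execution,
			// e.g. result.getAccumulatorResult("empty-lines")
			getRuntimeContext().addAccumulator("empty-lines", this.emptyLines);
		}

		@Override
		public boolean filter(String value) {
			if (value.trim().isEmpty()) {
				this.emptyLines.add(1);
				return false;
			}
			return true;
		}
	}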


[2/7] flink git commit: [FLINK-6707] [examples] Activate strict checkstyle for flink-examples

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/util/WebLogData.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/util/WebLogData.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/util/WebLogData.java
index ce11953..e7c349c 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/util/WebLogData.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/util/WebLogData.java
@@ -18,14 +18,13 @@
 
 package org.apache.flink.examples.java.relational.util;
 
-import java.util.ArrayList;
-import java.util.List;
-
+import org.apache.flink.api.java.DataSet;
+import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.api.java.tuple.Tuple3;
 
-import org.apache.flink.api.java.DataSet;
-import org.apache.flink.api.java.ExecutionEnvironment;
+import java.util.ArrayList;
+import java.util.List;
 
 /**
  * Provides the default data sets used for the Weblog Analysis example program.
@@ -35,363 +34,363 @@ import org.apache.flink.api.java.ExecutionEnvironment;
 public class WebLogData {
 
 	public static final Object [][] DOCUMENTS = {
-			new Object [] {"url_0","dolor ad amet enim laoreet nostrud veniam aliquip ex nonummy diam dolore tincidunt tation exerci exerci wisi dolor nostrud "},
-			new Object [] {"url_1","wisi minim adipiscing nibh adipiscing ut nibh Lorem Ut nonummy euismod nibh wisi sit consectetuer exerci sed aliquip aliquip dolore aliquam enim dolore veniam aliquam euismod suscipit ad adipiscing exerci aliquip consectetuer euismod aliquip ad exerci ex nibh ex erat exerci laoreet lobortis quis "},
-			new Object [] {"url_2","diam sed convection aliquip amet commodo nonummy sed sed commodo commodo diam commodo adipiscing ad exerci magna exerci tation quis lobortis "},
-			new Object [] {"url_3","exerci suscipit sed lobortis amet lobortis aliquip nibh nostrud ad convection commodo ad nibh sed minim amet ad ea ea "},
-			new Object [] {"url_4","sit enim dolor quis laoreet ullamcorper veniam adipiscing ex quis commodo "},
-			new Object [] {"url_5","elit aliquip ea nisl oscillations sit dolor ipsum tincidunt ullamcorper dolore enim adipiscing laoreet elit ea volutpat adipiscing ea nibh nostrud Ut aliquam veniam Lorem laoreet veniam aliquip "},
-			new Object [] {"url_6","consectetuer ad sed suscipit euismod aliquip quis ullamcorper oscillations tation consectetuer tation amet suscipit nibh enim nonummy veniam commodo commodo diam euismod dolor Ut aliquip diam ex ad nonummy ad tincidunt minim exerci consectetuer veniam convection aliquam ut ut Lorem euismod sed ipsum volutpat "},
-			new Object [] {"url_7","Ut volutpat veniam ut consectetuer diam ut aliquam dolor nostrud erat consectetuer adipiscing exerci consectetuer Ut ullamcorper suscipit aliquam sed dolor nisl "},
-			new Object [] {"url_8","suscipit amet wisi nisl veniam lobortis sit Lorem aliquam nostrud aliquam ipsum ut laoreet suscipit Lorem laoreet editors adipiscing ullamcorper veniam erat consectetuer ut lobortis dolore elit sed tincidunt ipsum tation ullamcorper nonummy adipiscing ex ad laoreet ipsum suscipit lobortis lobortis Ut nonummy adipiscing erat volutpat aliquam "},
-			new Object [] {"url_9","nonummy commodo tation editors ut quis sit quis lobortis ea dolore oscillations diam ad dolor lobortis nisl ad veniam ullamcorper quis magna volutpat sit ipsum consectetuer dolore exerci commodo magna erat enim ut suscipit "},
-			new Object [] {"url_10","amet erat magna consectetuer tation tation aliquip nibh aliquam sed adipiscing ut commodo ex erat tincidunt aliquam ipsum Ut Ut sit tincidunt adipiscing suscipit minim sed erat dolor consectetuer Lorem consectetuer Lorem amet nibh diam ea ex enim suscipit wisi dolor nonummy magna enim euismod ullamcorper ut suscipit adipiscing "},
-			new Object [] {"url_11","ex quis exerci tation diam elit nostrud nostrud ut ipsum elit amet diam laoreet amet consectetuer volutpat sed lobortis "},
-			new Object [] {"url_12","elit suscipit sit ullamcorper ut ad erat ut dolor nostrud quis nisl enim erat dolor convection ad minim ut veniam nostrud sed editors adipiscing volutpat Ut aliquip commodo sed euismod adipiscing erat adipiscing dolore nostrud minim sed lobortis ea diam "},
-			new Object [] {"url_13","enim ut quis commodo veniam minim erat lobortis ad diam ex dolor tincidunt exerci ut aliquip tincidunt minim ut magna sed enim wisi veniam oscillations Lorem consectetuer "},
-			new Object [] {"url_14","nibh ipsum ullamcorper volutpat ut wisi dolor quis amet euismod quis ipsum ipsum minim tation volutpat sit exerci volutpat amet nonummy euismod veniam consectetuer sit consectetuer tincidunt nibh aliquam lobortis tation veniam ut ullamcorper wisi magna Ut volutpat consectetuer erat quis dolore ea tation "},
-			new Object [] {"url_15","ad wisi sed enim aliquam oscillations nibh Lorem lobortis veniam nibh laoreet nonummy sed nibh Lorem adipiscing diam magna nostrud magna oscillations ut oscillations elit nostrud diam editors Lorem "},
-			new Object [] {"url_16","nostrud volutpat veniam exerci tincidunt nostrud quis elit ipsum ea nonummy volutpat dolor elit lobortis magna nisl ut ullamcorper magna Lorem exerci nibh nisl magna editors erat aliquam aliquam ullamcorper sit aliquam sit nostrud oscillations consectetuer adipiscing suscipit convection exerci ea ullamcorper ex nisl "},
-			new Object [] {"url_17","ad ex aliquam erat aliquam elit veniam laoreet ut amet amet nostrud ut adipiscing Ut Lorem suscipit ex magna ullamcorper aliquam ullamcorper ullamcorper amet amet commodo aliquam volutpat nonummy nonummy tincidunt amet tation tincidunt volutpat ut veniam nisl erat dolor enim nonummy nostrud adipiscing laoreet adipiscing "},
-			new Object [] {"url_18","lobortis ipsum ex tincidunt tincidunt editors euismod consectetuer ipsum adipiscing lobortis exerci adipiscing nonummy nisl dolore nonummy erat exerci nisl ut dolore wisi volutpat lobortis magna "},
-			new Object [] {"url_19","ipsum tation laoreet tation adipiscing wisi nibh diam Ut suscipit ad wisi "},
-			new Object [] {"url_20","diam Lorem enim wisi ad lobortis dolor Ut ipsum amet dolore consectetuer nisl exerci nisl nonummy minim Ut erat oscillations ut Lorem nostrud dolore Ut dolore exerci ad ipsum dolore ex dolore aliquip sed aliquam ex aliquip magna amet ex dolore oscillations aliquip tation magna Ut "},
-			new Object [] {"url_21","lobortis ut amet ex nisl ullamcorper tincidunt ut elit diam quis suscipit ad amet ipsum magna Ut ex tincidunt "},
-			new Object [] {"url_22","amet commodo nisl ad quis lobortis ut commodo sit ut erat exerci lobortis suscipit nibh ut nostrud ut adipiscing commodo commodo quis quis nostrud nisl ipsum nostrud laoreet Lorem nostrud erat nostrud amet consectetuer laoreet oscillations wisi sit magna nibh amet "},
-			new Object [] {"url_23","adipiscing suscipit suscipit aliquip suscipit consectetuer minim magna ea erat nibh sit suscipit sed dolor oscillations nonummy volutpat ut tincidunt "},
-			new Object [] {"url_24","commodo sed tincidunt aliquip aliquip dolore commodo nonummy sed erat ut ex exerci dolore adipiscing tincidunt ex diam amet aliquam "},
-			new Object [] {"url_25","consectetuer consectetuer exerci quis ea veniam aliquam laoreet minim ex "},
-			new Object [] {"url_26","dolor exerci euismod minim magna quis erat consectetuer sed ex erat dolore quis ut oscillations ullamcorper Lorem exerci ex nibh ut exerci ullamcorper veniam nibh ut commodo ut Ut nostrud tincidunt tincidunt ad dolore Lorem ea tation enim erat nibh ut ea nonummy sed sed wisi nisl dolore "},
-			new Object [] {"url_27","amet elit ea ea nostrud editors Ut nostrud amet laoreet adipiscing ut nisl nonummy tincidunt ea ipsum ex dolore dolore oscillations sit minim Ut wisi ut laoreet minim elit "},
-			new Object [] {"url_28","wisi exerci volutpat Ut nostrud euismod minim Ut sit euismod ut ea magna consectetuer nisl ad minim tation nisl adipiscing Lorem aliquam quis exerci erat minim aliquip sit Lorem wisi wisi ut "},
-			new Object [] {"url_29","amet sed laoreet amet aliquam minim enim tincidunt Lorem sit aliquip amet suscipit ut laoreet elit suscipit erat ut tincidunt suscipit ipsum sed euismod elit dolore euismod dolore ut dolor nostrud ipsum tincidunt commodo adipiscing aliquam ut wisi dolor dolor suscipit "},
-			new Object [] {"url_30","euismod Lorem ex tincidunt amet enim minim suscipit exerci diam veniam amet nostrud ea ea "},
-			new Object [] {"url_31","ex ipsum sit euismod euismod ullamcorper tincidunt ut wisi ea adipiscing sed diam tation ipsum dolor aliquam veniam nonummy aliquip aliquip Lorem ut minim nisl tation sit exerci ullamcorper Ut dolor euismod aliquam consectetuer ad nonummy commodo exerci "},
-			new Object [] {"url_32","volutpat ipsum lobortis nisl veniam minim adipiscing dolor editors quis nostrud amet nostrud "},
-			new Object [] {"url_33","commodo wisi aliquip ut aliquam sed nostrud ex diam ad nostrud enim ut amet enim ea ad sed tation nostrud suscipit ea magna magna Lorem amet lobortis ut quis nibh aliquam aliquam exerci aliquip lobortis consectetuer enim wisi ea nisl laoreet erat dolore "},
-			new Object [] {"url_34","tincidunt adipiscing enim tation nibh Ut dolore tincidunt tation laoreet suscipit minim aliquam volutpat laoreet suscipit tincidunt nibh ut ut sit nostrud nonummy tincidunt exerci sit ad sed consectetuer minim dolor dolore laoreet nostrud nibh laoreet ea adipiscing exerci dolore ipsum "},
-			new Object [] {"url_35","tation ut erat ut tation dolor Lorem laoreet Lorem elit adipiscing wisi aliquip nostrud elit Ut volutpat ea aliquam aliquip "},
-			new Object [] {"url_36","lobortis enim ullamcorper adipiscing consectetuer aliquip wisi enim minim Ut minim elit elit aliquam exerci ullamcorper amet lobortis adipiscing diam laoreet consectetuer nostrud diam diam amet ut enim ullamcorper aliquip diam ut nostrud diam magna amet nonummy commodo wisi enim ullamcorper suscipit euismod dolore tincidunt magna suscipit elit "},
-			new Object [] {"url_37","elit adipiscing nisl nisl ex aliquip nibh sed ut ad Lorem elit consectetuer ad volutpat lobortis amet veniam ipsum nibh ut consectetuer editors ad aliquam "},
-			new Object [] {"url_38","elit quis nibh adipiscing sit consectetuer ut euismod quis tincidunt quis nisl consectetuer dolor diam suscipit quis dolore Lorem suscipit nonummy sed ex "},
-			new Object [] {"url_39","nisl sit consectetuer elit oscillations enim ipsum enim nostrud adipiscing nostrud editors aliquam "},
-			new Object [] {"url_40","sed wisi dolor diam commodo ullamcorper commodo nostrud ullamcorper laoreet minim dolore suscipit laoreet tation aliquip "},
-			new Object [] {"url_41","ad consectetuer exerci nisl exerci amet enim diam lobortis Lorem ex volutpat volutpat nibh aliquam ut ullamcorper volutpat nostrud ut adipiscing ullamcorper "},
-			new Object [] {"url_42","minim laoreet tation magna veniam ut ea sit ipsum tincidunt Ut amet ex aliquip ex euismod exerci wisi elit editors ad amet veniam ad editors "},
-			new Object [] {"url_43","ut nisl ad ullamcorper nibh Ut editors exerci enim exerci ea laoreet veniam ea amet exerci volutpat amet ad "},
-			new Object [] {"url_44","volutpat tincidunt enim amet sed tincidunt consectetuer ullamcorper nisl Ut adipiscing tation ad ad amet nonummy elit erat nibh Lorem erat elit laoreet consectetuer sed aliquip nostrud "},
-			new Object [] {"url_45","sed aliquam ut ut consectetuer wisi euismod enim erat euismod quis exerci amet tation sit "},
-			new Object [] {"url_46","lobortis oscillations tation aliquam dolore Lorem aliquip tation exerci ullamcorper aliquam aliquip lobortis ex tation dolor ut ut sed suscipit nisl ullamcorper sed editors laoreet aliquip enim dolor veniam tincidunt sed euismod tation "},
-			new Object [] {"url_47","Lorem Lorem ut wisi ad ut tation consectetuer exerci convection tation ullamcorper sed dolore quis aliquam ipsum lobortis commodo nonummy "},
-			new Object [] {"url_48","laoreet minim veniam nisl elit sit amet commodo ex ullamcorper suscipit aliquip laoreet convection Ut ex minim aliquam "},
-			new Object [] {"url_49","lobortis nonummy minim amet sit veniam quis consectetuer tincidunt laoreet quis "},
-			new Object [] {"url_50","lobortis nisl commodo dolor amet nibh editors enim magna minim elit euismod diam laoreet laoreet ad minim sed ut Ut lobortis adipiscing quis sed ut aliquam oscillations exerci tation consectetuer lobortis elit tincidunt consectetuer minim amet dolore quis aliquam Ut exerci sed aliquam quis quis ullamcorper Ut ex tincidunt "},
-			new Object [] {"url_51","nostrud nisl ea erat ut suscipit Ut sit oscillations ullamcorper nonummy magna lobortis dolore editors tincidunt nostrud suscipit ex quis tation ut sit amet nostrud laoreet ex tincidunt "},
-			new Object [] {"url_52","ea tation commodo elit sed ex sed quis enim nisl magna laoreet adipiscing amet sit nostrud consectetuer nibh tincidunt veniam ex veniam euismod exerci sed dolore suscipit nisl tincidunt euismod quis Ut enim euismod dolor diam exerci magna exerci ut exerci nisl "},
-			new Object [] {"url_53","volutpat amet Ut lobortis dolor tation minim nonummy lobortis convection nostrud "},
-			new Object [] {"url_54","ullamcorper commodo Ut amet sit nostrud aliquam ad amet wisi enim nostrud ipsum nisl veniam erat aliquam ex aliquam dolor dolor ut consectetuer euismod exerci elit exerci Ut ea minim enim consectetuer ad consectetuer nonummy convection adipiscing ad ullamcorper lobortis nonummy laoreet nonummy aliquam ullamcorper ad nostrud amet "},
-			new Object [] {"url_55","wisi magna editors amet aliquam diam amet aliquip nisl consectetuer laoreet nonummy suscipit euismod diam enim tation elit ut lobortis quis euismod suscipit nostrud ea ea commodo lobortis dolore Ut nisl nostrud dolor laoreet euismod ea dolore aliquam ut Lorem exerci ex sit "},
-			new Object [] {"url_56","ex dolor veniam wisi laoreet ut exerci diam ad ex ut ut laoreet ut nisl ullamcorper nisl "},
-			new Object [] {"url_57","diam adipiscing Ut ut Lorem amet erat elit erat magna adipiscing euismod elit ullamcorper nostrud aliquam dolor ullamcorper sit tation tation "},
-			new Object [] {"url_58","laoreet convection veniam lobortis dolore ut nonummy commodo erat lobortis veniam nostrud dolore minim commodo ut consectetuer magna erat ea dolore Lorem suscipit ex ipsum exerci sed enim ea tation suscipit enim adipiscing "},
-			new Object [] {"url_59","amet ut ut Ut ad dolor quis ad magna exerci suscipit magna nibh commodo euismod amet euismod wisi diam suscipit dolore Lorem dolor ex amet exerci aliquip ut ut lobortis quis elit minim sed Lorem "},
-			new Object [] {"url_60","ut ut amet ullamcorper amet euismod dolor amet elit exerci adipiscing sed suscipit sed exerci wisi diam veniam wisi suscipit ut quis nibh ullamcorper ex quis magna dolore volutpat editors minim ut sit aliquip oscillations nisl ipsum "},
-			new Object [] {"url_61","nibh nostrud tincidunt lobortis adipiscing adipiscing ullamcorper ullamcorper ipsum nisl ullamcorper aliquip laoreet commodo ut tation wisi diam commodo aliquip commodo suscipit tincidunt volutpat elit enim laoreet ut nostrud ad nonummy ipsum "},
-			new Object [] {"url_62","Ut ut minim enim amet euismod erat elit commodo consectetuer Ut quis dolor ex diam quis wisi tation tincidunt laoreet volutpat "},
-			new Object [] {"url_63","ut erat volutpat euismod amet ea nonummy lobortis ut Ut ea veniam sed veniam nostrud "},
-			new Object [] {"url_64","tation dolor suscipit minim nisl wisi consectetuer aliquip tation Ut commodo ut dolore consectetuer elit wisi nisl ipsum "},
-			new Object [] {"url_65","ullamcorper nisl Lorem magna tation veniam aliquam diam amet euismod "},
-			new Object [] {"url_66","euismod aliquam tincidunt Ut volutpat ea lobortis sit ut volutpat ut lobortis ut lobortis ut nisl amet dolor sed ipsum enim ullamcorper diam euismod nostrud wisi erat quis diam nibh Ut dolore sed amet tation enim diam "},
-			new Object [] {"url_67","amet minim minim amet laoreet Lorem aliquam veniam elit volutpat magna adipiscing enim enim euismod laoreet sed ex sed aliquam ad ea ut adipiscing suscipit ex minim dolore minim ea laoreet nisl "},
-			new Object [] {"url_68","aliquam ea volutpat ut wisi tation tation nibh nisl erat laoreet ea volutpat dolor dolor aliquam exerci quis ullamcorper aliquam ut quis suscipit "},
-			new Object [] {"url_69","quis exerci ut aliquip wisi dolore magna nibh consectetuer magna tation ullamcorper lobortis sed amet adipiscing minim suscipit nibh nibh nostrud euismod enim "},
-			new Object [] {"url_70","tation enim consectetuer adipiscing wisi laoreet diam aliquip nostrud elit nostrud aliquip ea minim amet diam dolore "},
-			new Object [] {"url_71","consectetuer tincidunt nibh amet tation nonummy sit tation diam sed diam tation "},
-			new Object [] {"url_72","Lorem ut nostrud nonummy minim quis euismod lobortis nostrud nonummy adipiscing tincidunt consectetuer ut nibh ad suscipit dolor ut elit dolore amet ut quis tation ullamcorper nonummy laoreet ullamcorper aliquam dolore convection dolor tincidunt ut ullamcorper ex dolor suscipit erat oscillations ad "},
-			new Object [] {"url_73","elit Ut commodo ut ullamcorper ullamcorper ut euismod commodo diam aliquip suscipit consectetuer exerci tation nostrud ut wisi exerci sed ut elit sed volutpat Lorem nibh laoreet consectetuer ex Lorem elit aliquam commodo lobortis ad "},
-			new Object [] {"url_74","quis magna laoreet commodo aliquam nisl ullamcorper veniam tation wisi consectetuer commodo consectetuer ad dolore aliquam dolor elit amet sit amet nibh commodo erat veniam aliquip dolore ad magna ad ipsum Ut exerci ea volutpat nisl amet nostrud sit "},
-			new Object [] {"url_75","tincidunt suscipit sit aliquip aliquam adipiscing dolore exerci Ut suscipit ut sit laoreet suscipit wisi sit enim nonummy consectetuer dolore editors "},
-			new Object [] {"url_76","veniam ullamcorper tation sit suscipit dolor suscipit veniam sit Lorem quis sed nostrud ad tincidunt elit adipiscing "},
-			new Object [] {"url_77","volutpat sit amet veniam quis ipsum nibh elit enim commodo magna veniam magna convection "},
-			new Object [] {"url_78","tation dolore minim elit nisl volutpat tation laoreet enim nostrud exerci dolore tincidunt aliquip Lorem ipsum nostrud quis adipiscing ullamcorper erat lobortis tation commodo Ut ipsum commodo magna ad ipsum ut enim "},
-			new Object [] {"url_79","lobortis amet elit Lorem amet nonummy commodo tation ex ea amet Lorem ea nonummy commodo veniam volutpat nibh wisi ad ipsum euismod ea convection nostrud nisl erat veniam Ut aliquip ad aliquip editors wisi magna tation nostrud nonummy adipiscing ullamcorper aliquip "},
-			new Object [] {"url_80","tincidunt nostrud nostrud magna ea euismod ea consectetuer nisl exerci ea dolor nisl commodo ex erat ipsum exerci suscipit ad nisl ea nonummy suscipit adipiscing laoreet sit euismod nibh adipiscing sed minim commodo amet "},
-			new Object [] {"url_81","nostrud erat ut sed editors erat amet magna lobortis diam laoreet dolor amet nibh ut ipsum ipsum amet ut sed ut exerci elit suscipit wisi magna ut veniam nisl commodo enim adipiscing laoreet ad Lorem oscillations "},
-			new Object [] {"url_82","quis commodo nibh nibh volutpat suscipit dolore magna tincidunt nibh ut ad ullamcorper ullamcorper quis enim ad ut tation minim laoreet veniam dolor sed tincidunt exerci exerci nostrud ullamcorper amet ut ut ullamcorper "},
-			new Object [] {"url_83","sit suscipit volutpat elit tation elit sed sed dolor ex ex ipsum euismod laoreet magna lobortis ad "},
-			new Object [] {"url_84","lobortis ipsum euismod enim ea tation veniam tation oscillations aliquip consectetuer euismod ut sed lobortis tation oscillations commodo euismod laoreet suscipit amet elit ullamcorper volutpat aliquam ea enim ullamcorper consectetuer laoreet tation quis ut commodo erat euismod dolor laoreet ullamcorper laoreet "},
-			new Object [] {"url_85","adipiscing sit quis commodo consectetuer quis enim euismod exerci nonummy ea nostrud Ut veniam sit aliquip nisl enim "},
-			new Object [] {"url_86","nostrud dolore veniam veniam wisi aliquip adipiscing diam sed quis ullamcorper "},
-			new Object [] {"url_87","quis Lorem suscipit Ut nibh diam euismod consectetuer lobortis ipsum sed suscipit consectetuer euismod laoreet ut wisi nisl elit quis commodo adipiscing adipiscing suscipit aliquam nisl quis magna ipsum enim ad quis ea magna Lorem nibh ea "},
-			new Object [] {"url_88","euismod commodo sed tincidunt Ut veniam consectetuer quis erat ex ea erat laoreet commodo nibh minim "},
-			new Object [] {"url_89","tation diam editors Ut enim nibh Lorem volutpat quis diam suscipit exerci wisi ad "},
-			new Object [] {"url_90","volutpat editors ea nibh wisi ad amet volutpat nisl ullamcorper nibh volutpat minim ex ut sit veniam Lorem consectetuer quis ad sit suscipit volutpat wisi diam sed tincidunt ipsum minim convection ea diam oscillations quis lobortis "},
-			new Object [] {"url_91","enim minim nonummy ea minim euismod adipiscing editors volutpat magna sit magna ut ipsum ut "},
-			new Object [] {"url_92","nisl Ut commodo amet euismod lobortis ea ea wisi commodo Lorem sit ipsum volutpat nonummy exerci erat elit exerci magna ad erat enim laoreet quis nostrud wisi ut veniam amet ullamcorper lobortis ad suscipit volutpat veniam nostrud nibh quis ipsum dolore consectetuer veniam ipsum aliquip dolore sed laoreet ipsum "},
-			new Object [] {"url_93","nonummy aliquam ad lobortis Lorem erat ad tation Lorem exerci ex "},
-			new Object [] {"url_94","nonummy dolore commodo exerci ex quis ut suscipit elit laoreet sit tation magna veniam ea sit nonummy veniam Lorem quis nibh aliquip exerci amet ullamcorper adipiscing erat nisl editors diam commodo ad euismod adipiscing ea suscipit exerci aliquip volutpat tation enim volutpat sit "},
-			new Object [] {"url_95","sit suscipit oscillations ipsum nibh dolor ea dolore ea elit ipsum minim editors magna consectetuer ullamcorper commodo nonummy sit nostrud aliquip sit erat ullamcorper ullamcorper nibh veniam erat quis dolore nonummy "},
-			new Object [] {"url_96","nostrud quis ut volutpat magna ad quis adipiscing Lorem commodo exerci laoreet magna adipiscing erat quis wisi ea ea laoreet enim convection ad dolor nisl amet nibh aliquam adipiscing tincidunt minim diam Lorem commodo adipiscing volutpat "},
-			new Object [] {"url_97","laoreet laoreet suscipit nostrud dolore adipiscing volutpat Ut sed nisl diam ullamcorper ex ut ut dolor amet nostrud euismod dolore veniam veniam enim tation veniam ea minim minim volutpat tincidunt "},
-			new Object [] {"url_98","quis lobortis amet wisi nostrud ipsum aliquam convection tincidunt dolore ullamcorper nibh lobortis volutpat ea nostrud oscillations minim nonummy enim ad lobortis exerci ipsum ullamcorper nibh nonummy diam amet enim veniam ut nostrud "},
-			new Object [] {"url_99","aliquam wisi suscipit commodo diam amet amet magna nisl enim nostrud tation nisl nostrud nibh ut "}
+			new Object [] {"url_0", "dolor ad amet enim laoreet nostrud veniam aliquip ex nonummy diam dolore tincidunt tation exerci exerci wisi dolor nostrud "},
+			new Object [] {"url_1", "wisi minim adipiscing nibh adipiscing ut nibh Lorem Ut nonummy euismod nibh wisi sit consectetuer exerci sed aliquip aliquip dolore aliquam enim dolore veniam aliquam euismod suscipit ad adipiscing exerci aliquip consectetuer euismod aliquip ad exerci ex nibh ex erat exerci laoreet lobortis quis "},
+			new Object [] {"url_2", "diam sed convection aliquip amet commodo nonummy sed sed commodo commodo diam commodo adipiscing ad exerci magna exerci tation quis lobortis "},
+			new Object [] {"url_3", "exerci suscipit sed lobortis amet lobortis aliquip nibh nostrud ad convection commodo ad nibh sed minim amet ad ea ea "},
+			new Object [] {"url_4", "sit enim dolor quis laoreet ullamcorper veniam adipiscing ex quis commodo "},
+			new Object [] {"url_5", "elit aliquip ea nisl oscillations sit dolor ipsum tincidunt ullamcorper dolore enim adipiscing laoreet elit ea volutpat adipiscing ea nibh nostrud Ut aliquam veniam Lorem laoreet veniam aliquip "},
+			new Object [] {"url_6", "consectetuer ad sed suscipit euismod aliquip quis ullamcorper oscillations tation consectetuer tation amet suscipit nibh enim nonummy veniam commodo commodo diam euismod dolor Ut aliquip diam ex ad nonummy ad tincidunt minim exerci consectetuer veniam convection aliquam ut ut Lorem euismod sed ipsum volutpat "},
+			new Object [] {"url_7", "Ut volutpat veniam ut consectetuer diam ut aliquam dolor nostrud erat consectetuer adipiscing exerci consectetuer Ut ullamcorper suscipit aliquam sed dolor nisl "},
+			new Object [] {"url_8", "suscipit amet wisi nisl veniam lobortis sit Lorem aliquam nostrud aliquam ipsum ut laoreet suscipit Lorem laoreet editors adipiscing ullamcorper veniam erat consectetuer ut lobortis dolore elit sed tincidunt ipsum tation ullamcorper nonummy adipiscing ex ad laoreet ipsum suscipit lobortis lobortis Ut nonummy adipiscing erat volutpat aliquam "},
+			new Object [] {"url_9", "nonummy commodo tation editors ut quis sit quis lobortis ea dolore oscillations diam ad dolor lobortis nisl ad veniam ullamcorper quis magna volutpat sit ipsum consectetuer dolore exerci commodo magna erat enim ut suscipit "},
+			new Object [] {"url_10", "amet erat magna consectetuer tation tation aliquip nibh aliquam sed adipiscing ut commodo ex erat tincidunt aliquam ipsum Ut Ut sit tincidunt adipiscing suscipit minim sed erat dolor consectetuer Lorem consectetuer Lorem amet nibh diam ea ex enim suscipit wisi dolor nonummy magna enim euismod ullamcorper ut suscipit adipiscing "},
+			new Object [] {"url_11", "ex quis exerci tation diam elit nostrud nostrud ut ipsum elit amet diam laoreet amet consectetuer volutpat sed lobortis "},
+			new Object [] {"url_12", "elit suscipit sit ullamcorper ut ad erat ut dolor nostrud quis nisl enim erat dolor convection ad minim ut veniam nostrud sed editors adipiscing volutpat Ut aliquip commodo sed euismod adipiscing erat adipiscing dolore nostrud minim sed lobortis ea diam "},
+			new Object [] {"url_13", "enim ut quis commodo veniam minim erat lobortis ad diam ex dolor tincidunt exerci ut aliquip tincidunt minim ut magna sed enim wisi veniam oscillations Lorem consectetuer "},
+			new Object [] {"url_14", "nibh ipsum ullamcorper volutpat ut wisi dolor quis amet euismod quis ipsum ipsum minim tation volutpat sit exerci volutpat amet nonummy euismod veniam consectetuer sit consectetuer tincidunt nibh aliquam lobortis tation veniam ut ullamcorper wisi magna Ut volutpat consectetuer erat quis dolore ea tation "},
+			new Object [] {"url_15", "ad wisi sed enim aliquam oscillations nibh Lorem lobortis veniam nibh laoreet nonummy sed nibh Lorem adipiscing diam magna nostrud magna oscillations ut oscillations elit nostrud diam editors Lorem "},
+			new Object [] {"url_16", "nostrud volutpat veniam exerci tincidunt nostrud quis elit ipsum ea nonummy volutpat dolor elit lobortis magna nisl ut ullamcorper magna Lorem exerci nibh nisl magna editors erat aliquam aliquam ullamcorper sit aliquam sit nostrud oscillations consectetuer adipiscing suscipit convection exerci ea ullamcorper ex nisl "},
+			new Object [] {"url_17", "ad ex aliquam erat aliquam elit veniam laoreet ut amet amet nostrud ut adipiscing Ut Lorem suscipit ex magna ullamcorper aliquam ullamcorper ullamcorper amet amet commodo aliquam volutpat nonummy nonummy tincidunt amet tation tincidunt volutpat ut veniam nisl erat dolor enim nonummy nostrud adipiscing laoreet adipiscing "},
+			new Object [] {"url_18", "lobortis ipsum ex tincidunt tincidunt editors euismod consectetuer ipsum adipiscing lobortis exerci adipiscing nonummy nisl dolore nonummy erat exerci nisl ut dolore wisi volutpat lobortis magna "},
+			new Object [] {"url_19", "ipsum tation laoreet tation adipiscing wisi nibh diam Ut suscipit ad wisi "},
+			new Object [] {"url_20", "diam Lorem enim wisi ad lobortis dolor Ut ipsum amet dolore consectetuer nisl exerci nisl nonummy minim Ut erat oscillations ut Lorem nostrud dolore Ut dolore exerci ad ipsum dolore ex dolore aliquip sed aliquam ex aliquip magna amet ex dolore oscillations aliquip tation magna Ut "},
+			new Object [] {"url_21", "lobortis ut amet ex nisl ullamcorper tincidunt ut elit diam quis suscipit ad amet ipsum magna Ut ex tincidunt "},
+			new Object [] {"url_22", "amet commodo nisl ad quis lobortis ut commodo sit ut erat exerci lobortis suscipit nibh ut nostrud ut adipiscing commodo commodo quis quis nostrud nisl ipsum nostrud laoreet Lorem nostrud erat nostrud amet consectetuer laoreet oscillations wisi sit magna nibh amet "},
+			new Object [] {"url_23", "adipiscing suscipit suscipit aliquip suscipit consectetuer minim magna ea erat nibh sit suscipit sed dolor oscillations nonummy volutpat ut tincidunt "},
+			new Object [] {"url_24", "commodo sed tincidunt aliquip aliquip dolore commodo nonummy sed erat ut ex exerci dolore adipiscing tincidunt ex diam amet aliquam "},
+			new Object [] {"url_25", "consectetuer consectetuer exerci quis ea veniam aliquam laoreet minim ex "},
+			new Object [] {"url_26", "dolor exerci euismod minim magna quis erat consectetuer sed ex erat dolore quis ut oscillations ullamcorper Lorem exerci ex nibh ut exerci ullamcorper veniam nibh ut commodo ut Ut nostrud tincidunt tincidunt ad dolore Lorem ea tation enim erat nibh ut ea nonummy sed sed wisi nisl dolore "},
+			new Object [] {"url_27", "amet elit ea ea nostrud editors Ut nostrud amet laoreet adipiscing ut nisl nonummy tincidunt ea ipsum ex dolore dolore oscillations sit minim Ut wisi ut laoreet minim elit "},
+			new Object [] {"url_28", "wisi exerci volutpat Ut nostrud euismod minim Ut sit euismod ut ea magna consectetuer nisl ad minim tation nisl adipiscing Lorem aliquam quis exerci erat minim aliquip sit Lorem wisi wisi ut "},
+			new Object [] {"url_29", "amet sed laoreet amet aliquam minim enim tincidunt Lorem sit aliquip amet suscipit ut laoreet elit suscipit erat ut tincidunt suscipit ipsum sed euismod elit dolore euismod dolore ut dolor nostrud ipsum tincidunt commodo adipiscing aliquam ut wisi dolor dolor suscipit "},
+			new Object [] {"url_30", "euismod Lorem ex tincidunt amet enim minim suscipit exerci diam veniam amet nostrud ea ea "},
+			new Object [] {"url_31", "ex ipsum sit euismod euismod ullamcorper tincidunt ut wisi ea adipiscing sed diam tation ipsum dolor aliquam veniam nonummy aliquip aliquip Lorem ut minim nisl tation sit exerci ullamcorper Ut dolor euismod aliquam consectetuer ad nonummy commodo exerci "},
+			new Object [] {"url_32", "volutpat ipsum lobortis nisl veniam minim adipiscing dolor editors quis nostrud amet nostrud "},
+			new Object [] {"url_33", "commodo wisi aliquip ut aliquam sed nostrud ex diam ad nostrud enim ut amet enim ea ad sed tation nostrud suscipit ea magna magna Lorem amet lobortis ut quis nibh aliquam aliquam exerci aliquip lobortis consectetuer enim wisi ea nisl laoreet erat dolore "},
+			new Object [] {"url_34", "tincidunt adipiscing enim tation nibh Ut dolore tincidunt tation laoreet suscipit minim aliquam volutpat laoreet suscipit tincidunt nibh ut ut sit nostrud nonummy tincidunt exerci sit ad sed consectetuer minim dolor dolore laoreet nostrud nibh laoreet ea adipiscing exerci dolore ipsum "},
+			new Object [] {"url_35", "tation ut erat ut tation dolor Lorem laoreet Lorem elit adipiscing wisi aliquip nostrud elit Ut volutpat ea aliquam aliquip "},
+			new Object [] {"url_36", "lobortis enim ullamcorper adipiscing consectetuer aliquip wisi enim minim Ut minim elit elit aliquam exerci ullamcorper amet lobortis adipiscing diam laoreet consectetuer nostrud diam diam amet ut enim ullamcorper aliquip diam ut nostrud diam magna amet nonummy commodo wisi enim ullamcorper suscipit euismod dolore tincidunt magna suscipit elit "},
+			new Object [] {"url_37", "elit adipiscing nisl nisl ex aliquip nibh sed ut ad Lorem elit consectetuer ad volutpat lobortis amet veniam ipsum nibh ut consectetuer editors ad aliquam "},
+			new Object [] {"url_38", "elit quis nibh adipiscing sit consectetuer ut euismod quis tincidunt quis nisl consectetuer dolor diam suscipit quis dolore Lorem suscipit nonummy sed ex "},
+			new Object [] {"url_39", "nisl sit consectetuer elit oscillations enim ipsum enim nostrud adipiscing nostrud editors aliquam "},
+			new Object [] {"url_40", "sed wisi dolor diam commodo ullamcorper commodo nostrud ullamcorper laoreet minim dolore suscipit laoreet tation aliquip "},
+			new Object [] {"url_41", "ad consectetuer exerci nisl exerci amet enim diam lobortis Lorem ex volutpat volutpat nibh aliquam ut ullamcorper volutpat nostrud ut adipiscing ullamcorper "},
+			new Object [] {"url_42", "minim laoreet tation magna veniam ut ea sit ipsum tincidunt Ut amet ex aliquip ex euismod exerci wisi elit editors ad amet veniam ad editors "},
+			new Object [] {"url_43", "ut nisl ad ullamcorper nibh Ut editors exerci enim exerci ea laoreet veniam ea amet exerci volutpat amet ad "},
+			new Object [] {"url_44", "volutpat tincidunt enim amet sed tincidunt consectetuer ullamcorper nisl Ut adipiscing tation ad ad amet nonummy elit erat nibh Lorem erat elit laoreet consectetuer sed aliquip nostrud "},
+			new Object [] {"url_45", "sed aliquam ut ut consectetuer wisi euismod enim erat euismod quis exerci amet tation sit "},
+			new Object [] {"url_46", "lobortis oscillations tation aliquam dolore Lorem aliquip tation exerci ullamcorper aliquam aliquip lobortis ex tation dolor ut ut sed suscipit nisl ullamcorper sed editors laoreet aliquip enim dolor veniam tincidunt sed euismod tation "},
+			new Object [] {"url_47", "Lorem Lorem ut wisi ad ut tation consectetuer exerci convection tation ullamcorper sed dolore quis aliquam ipsum lobortis commodo nonummy "},
+			new Object [] {"url_48", "laoreet minim veniam nisl elit sit amet commodo ex ullamcorper suscipit aliquip laoreet convection Ut ex minim aliquam "},
+			new Object [] {"url_49", "lobortis nonummy minim amet sit veniam quis consectetuer tincidunt laoreet quis "},
+			new Object [] {"url_50", "lobortis nisl commodo dolor amet nibh editors enim magna minim elit euismod diam laoreet laoreet ad minim sed ut Ut lobortis adipiscing quis sed ut aliquam oscillations exerci tation consectetuer lobortis elit tincidunt consectetuer minim amet dolore quis aliquam Ut exerci sed aliquam quis quis ullamcorper Ut ex tincidunt "},
+			new Object [] {"url_51", "nostrud nisl ea erat ut suscipit Ut sit oscillations ullamcorper nonummy magna lobortis dolore editors tincidunt nostrud suscipit ex quis tation ut sit amet nostrud laoreet ex tincidunt "},
+			new Object [] {"url_52", "ea tation commodo elit sed ex sed quis enim nisl magna laoreet adipiscing amet sit nostrud consectetuer nibh tincidunt veniam ex veniam euismod exerci sed dolore suscipit nisl tincidunt euismod quis Ut enim euismod dolor diam exerci magna exerci ut exerci nisl "},
+			new Object [] {"url_53", "volutpat amet Ut lobortis dolor tation minim nonummy lobortis convection nostrud "},
+			new Object [] {"url_54", "ullamcorper commodo Ut amet sit nostrud aliquam ad amet wisi enim nostrud ipsum nisl veniam erat aliquam ex aliquam dolor dolor ut consectetuer euismod exerci elit exerci Ut ea minim enim consectetuer ad consectetuer nonummy convection adipiscing ad ullamcorper lobortis nonummy laoreet nonummy aliquam ullamcorper ad nostrud amet "},
+			new Object [] {"url_55", "wisi magna editors amet aliquam diam amet aliquip nisl consectetuer laoreet nonummy suscipit euismod diam enim tation elit ut lobortis quis euismod suscipit nostrud ea ea commodo lobortis dolore Ut nisl nostrud dolor laoreet euismod ea dolore aliquam ut Lorem exerci ex sit "},
+			new Object [] {"url_56", "ex dolor veniam wisi laoreet ut exerci diam ad ex ut ut laoreet ut nisl ullamcorper nisl "},
+			new Object [] {"url_57", "diam adipiscing Ut ut Lorem amet erat elit erat magna adipiscing euismod elit ullamcorper nostrud aliquam dolor ullamcorper sit tation tation "},
+			new Object [] {"url_58", "laoreet convection veniam lobortis dolore ut nonummy commodo erat lobortis veniam nostrud dolore minim commodo ut consectetuer magna erat ea dolore Lorem suscipit ex ipsum exerci sed enim ea tation suscipit enim adipiscing "},
+			new Object [] {"url_59", "amet ut ut Ut ad dolor quis ad magna exerci suscipit magna nibh commodo euismod amet euismod wisi diam suscipit dolore Lorem dolor ex amet exerci aliquip ut ut lobortis quis elit minim sed Lorem "},
+			new Object [] {"url_60", "ut ut amet ullamcorper amet euismod dolor amet elit exerci adipiscing sed suscipit sed exerci wisi diam veniam wisi suscipit ut quis nibh ullamcorper ex quis magna dolore volutpat editors minim ut sit aliquip oscillations nisl ipsum "},
+			new Object [] {"url_61", "nibh nostrud tincidunt lobortis adipiscing adipiscing ullamcorper ullamcorper ipsum nisl ullamcorper aliquip laoreet commodo ut tation wisi diam commodo aliquip commodo suscipit tincidunt volutpat elit enim laoreet ut nostrud ad nonummy ipsum "},
+			new Object [] {"url_62", "Ut ut minim enim amet euismod erat elit commodo consectetuer Ut quis dolor ex diam quis wisi tation tincidunt laoreet volutpat "},
+			new Object [] {"url_63", "ut erat volutpat euismod amet ea nonummy lobortis ut Ut ea veniam sed veniam nostrud "},
+			new Object [] {"url_64", "tation dolor suscipit minim nisl wisi consectetuer aliquip tation Ut commodo ut dolore consectetuer elit wisi nisl ipsum "},
+			new Object [] {"url_65", "ullamcorper nisl Lorem magna tation veniam aliquam diam amet euismod "},
+			new Object [] {"url_66", "euismod aliquam tincidunt Ut volutpat ea lobortis sit ut volutpat ut lobortis ut lobortis ut nisl amet dolor sed ipsum enim ullamcorper diam euismod nostrud wisi erat quis diam nibh Ut dolore sed amet tation enim diam "},
+			new Object [] {"url_67", "amet minim minim amet laoreet Lorem aliquam veniam elit volutpat magna adipiscing enim enim euismod laoreet sed ex sed aliquam ad ea ut adipiscing suscipit ex minim dolore minim ea laoreet nisl "},
+			new Object [] {"url_68", "aliquam ea volutpat ut wisi tation tation nibh nisl erat laoreet ea volutpat dolor dolor aliquam exerci quis ullamcorper aliquam ut quis suscipit "},
+			new Object [] {"url_69", "quis exerci ut aliquip wisi dolore magna nibh consectetuer magna tation ullamcorper lobortis sed amet adipiscing minim suscipit nibh nibh nostrud euismod enim "},
+			new Object [] {"url_70", "tation enim consectetuer adipiscing wisi laoreet diam aliquip nostrud elit nostrud aliquip ea minim amet diam dolore "},
+			new Object [] {"url_71", "consectetuer tincidunt nibh amet tation nonummy sit tation diam sed diam tation "},
+			new Object [] {"url_72", "Lorem ut nostrud nonummy minim quis euismod lobortis nostrud nonummy adipiscing tincidunt consectetuer ut nibh ad suscipit dolor ut elit dolore amet ut quis tation ullamcorper nonummy laoreet ullamcorper aliquam dolore convection dolor tincidunt ut ullamcorper ex dolor suscipit erat oscillations ad "},
+			new Object [] {"url_73", "elit Ut commodo ut ullamcorper ullamcorper ut euismod commodo diam aliquip suscipit consectetuer exerci tation nostrud ut wisi exerci sed ut elit sed volutpat Lorem nibh laoreet consectetuer ex Lorem elit aliquam commodo lobortis ad "},
+			new Object [] {"url_74", "quis magna laoreet commodo aliquam nisl ullamcorper veniam tation wisi consectetuer commodo consectetuer ad dolore aliquam dolor elit amet sit amet nibh commodo erat veniam aliquip dolore ad magna ad ipsum Ut exerci ea volutpat nisl amet nostrud sit "},
+			new Object [] {"url_75", "tincidunt suscipit sit aliquip aliquam adipiscing dolore exerci Ut suscipit ut sit laoreet suscipit wisi sit enim nonummy consectetuer dolore editors "},
+			new Object [] {"url_76", "veniam ullamcorper tation sit suscipit dolor suscipit veniam sit Lorem quis sed nostrud ad tincidunt elit adipiscing "},
+			new Object [] {"url_77", "volutpat sit amet veniam quis ipsum nibh elit enim commodo magna veniam magna convection "},
+			new Object [] {"url_78", "tation dolore minim elit nisl volutpat tation laoreet enim nostrud exerci dolore tincidunt aliquip Lorem ipsum nostrud quis adipiscing ullamcorper erat lobortis tation commodo Ut ipsum commodo magna ad ipsum ut enim "},
+			new Object [] {"url_79", "lobortis amet elit Lorem amet nonummy commodo tation ex ea amet Lorem ea nonummy commodo veniam volutpat nibh wisi ad ipsum euismod ea convection nostrud nisl erat veniam Ut aliquip ad aliquip editors wisi magna tation nostrud nonummy adipiscing ullamcorper aliquip "},
+			new Object [] {"url_80", "tincidunt nostrud nostrud magna ea euismod ea consectetuer nisl exerci ea dolor nisl commodo ex erat ipsum exerci suscipit ad nisl ea nonummy suscipit adipiscing laoreet sit euismod nibh adipiscing sed minim commodo amet "},
+			new Object [] {"url_81", "nostrud erat ut sed editors erat amet magna lobortis diam laoreet dolor amet nibh ut ipsum ipsum amet ut sed ut exerci elit suscipit wisi magna ut veniam nisl commodo enim adipiscing laoreet ad Lorem oscillations "},
+			new Object [] {"url_82", "quis commodo nibh nibh volutpat suscipit dolore magna tincidunt nibh ut ad ullamcorper ullamcorper quis enim ad ut tation minim laoreet veniam dolor sed tincidunt exerci exerci nostrud ullamcorper amet ut ut ullamcorper "},
+			new Object [] {"url_83", "sit suscipit volutpat elit tation elit sed sed dolor ex ex ipsum euismod laoreet magna lobortis ad "},
+			new Object [] {"url_84", "lobortis ipsum euismod enim ea tation veniam tation oscillations aliquip consectetuer euismod ut sed lobortis tation oscillations commodo euismod laoreet suscipit amet elit ullamcorper volutpat aliquam ea enim ullamcorper consectetuer laoreet tation quis ut commodo erat euismod dolor laoreet ullamcorper laoreet "},
+			new Object [] {"url_85", "adipiscing sit quis commodo consectetuer quis enim euismod exerci nonummy ea nostrud Ut veniam sit aliquip nisl enim "},
+			new Object [] {"url_86", "nostrud dolore veniam veniam wisi aliquip adipiscing diam sed quis ullamcorper "},
+			new Object [] {"url_87", "quis Lorem suscipit Ut nibh diam euismod consectetuer lobortis ipsum sed suscipit consectetuer euismod laoreet ut wisi nisl elit quis commodo adipiscing adipiscing suscipit aliquam nisl quis magna ipsum enim ad quis ea magna Lorem nibh ea "},
+			new Object [] {"url_88", "euismod commodo sed tincidunt Ut veniam consectetuer quis erat ex ea erat laoreet commodo nibh minim "},
+			new Object [] {"url_89", "tation diam editors Ut enim nibh Lorem volutpat quis diam suscipit exerci wisi ad "},
+			new Object [] {"url_90", "volutpat editors ea nibh wisi ad amet volutpat nisl ullamcorper nibh volutpat minim ex ut sit veniam Lorem consectetuer quis ad sit suscipit volutpat wisi diam sed tincidunt ipsum minim convection ea diam oscillations quis lobortis "},
+			new Object [] {"url_91", "enim minim nonummy ea minim euismod adipiscing editors volutpat magna sit magna ut ipsum ut "},
+			new Object [] {"url_92", "nisl Ut commodo amet euismod lobortis ea ea wisi commodo Lorem sit ipsum volutpat nonummy exerci erat elit exerci magna ad erat enim laoreet quis nostrud wisi ut veniam amet ullamcorper lobortis ad suscipit volutpat veniam nostrud nibh quis ipsum dolore consectetuer veniam ipsum aliquip dolore sed laoreet ipsum "},
+			new Object [] {"url_93", "nonummy aliquam ad lobortis Lorem erat ad tation Lorem exerci ex "},
+			new Object [] {"url_94", "nonummy dolore commodo exerci ex quis ut suscipit elit laoreet sit tation magna veniam ea sit nonummy veniam Lorem quis nibh aliquip exerci amet ullamcorper adipiscing erat nisl editors diam commodo ad euismod adipiscing ea suscipit exerci aliquip volutpat tation enim volutpat sit "},
+			new Object [] {"url_95", "sit suscipit oscillations ipsum nibh dolor ea dolore ea elit ipsum minim editors magna consectetuer ullamcorper commodo nonummy sit nostrud aliquip sit erat ullamcorper ullamcorper nibh veniam erat quis dolore nonummy "},
+			new Object [] {"url_96", "nostrud quis ut volutpat magna ad quis adipiscing Lorem commodo exerci laoreet magna adipiscing erat quis wisi ea ea laoreet enim convection ad dolor nisl amet nibh aliquam adipiscing tincidunt minim diam Lorem commodo adipiscing volutpat "},
+			new Object [] {"url_97", "laoreet laoreet suscipit nostrud dolore adipiscing volutpat Ut sed nisl diam ullamcorper ex ut ut dolor amet nostrud euismod dolore veniam veniam enim tation veniam ea minim minim volutpat tincidunt "},
+			new Object [] {"url_98", "quis lobortis amet wisi nostrud ipsum aliquam convection tincidunt dolore ullamcorper nibh lobortis volutpat ea nostrud oscillations minim nonummy enim ad lobortis exerci ipsum ullamcorper nibh nonummy diam amet enim veniam ut nostrud "},
+			new Object [] {"url_99", "aliquam wisi suscipit commodo diam amet amet magna nisl enim nostrud tation nisl nostrud nibh ut "}
 	};
 
 	public static final Object [][] RANKS = {
-			new Object [] {30,"url_0",43},
-			new Object [] {82,"url_1",39},
-			new Object [] {56,"url_2",31},
-			new Object [] {96,"url_3",36},
-			new Object [] {31,"url_4",36},
-			new Object [] {29,"url_5",6},
-			new Object [] {33,"url_6",48},
-			new Object [] {66,"url_7",40},
-			new Object [] {28,"url_8",51},
-			new Object [] {9,"url_9",4},
-			new Object [] {49,"url_10",24},
-			new Object [] {26,"url_11",12},
-			new Object [] {39,"url_12",46},
-			new Object [] {84,"url_13",53},
-			new Object [] {29,"url_14",50},
-			new Object [] {21,"url_15",12},
-			new Object [] {69,"url_16",34},
-			new Object [] {11,"url_17",38},
-			new Object [] {96,"url_18",13},
-			new Object [] {56,"url_19",48},
-			new Object [] {18,"url_20",36},
-			new Object [] {31,"url_21",21},
-			new Object [] {29,"url_22",11},
-			new Object [] {71,"url_23",30},
-			new Object [] {85,"url_24",48},
-			new Object [] {19,"url_25",45},
-			new Object [] {69,"url_26",9},
-			new Object [] {20,"url_27",51},
-			new Object [] {33,"url_28",46},
-			new Object [] {75,"url_29",38},
-			new Object [] {96,"url_30",51},
-			new Object [] {73,"url_31",40},
-			new Object [] {67,"url_32",16},
-			new Object [] {24,"url_33",24},
-			new Object [] {27,"url_34",35},
-			new Object [] {33,"url_35",35},
-			new Object [] {7,"url_36",22},
-			new Object [] {83,"url_37",41},
-			new Object [] {23,"url_38",49},
-			new Object [] {41,"url_39",33},
-			new Object [] {66,"url_40",38},
-			new Object [] {4,"url_41",52},
-			new Object [] {34,"url_42",4},
-			new Object [] {28,"url_43",12},
-			new Object [] {14,"url_44",14},
-			new Object [] {41,"url_45",11},
-			new Object [] {48,"url_46",37},
-			new Object [] {75,"url_47",41},
-			new Object [] {78,"url_48",3},
-			new Object [] {63,"url_49",28}
+			new Object [] {30, "url_0", 43},
+			new Object [] {82, "url_1", 39},
+			new Object [] {56, "url_2", 31},
+			new Object [] {96, "url_3", 36},
+			new Object [] {31, "url_4", 36},
+			new Object [] {29, "url_5", 6},
+			new Object [] {33, "url_6", 48},
+			new Object [] {66, "url_7", 40},
+			new Object [] {28, "url_8", 51},
+			new Object [] {9, "url_9", 4},
+			new Object [] {49, "url_10", 24},
+			new Object [] {26, "url_11", 12},
+			new Object [] {39, "url_12", 46},
+			new Object [] {84, "url_13", 53},
+			new Object [] {29, "url_14", 50},
+			new Object [] {21, "url_15", 12},
+			new Object [] {69, "url_16", 34},
+			new Object [] {11, "url_17", 38},
+			new Object [] {96, "url_18", 13},
+			new Object [] {56, "url_19", 48},
+			new Object [] {18, "url_20", 36},
+			new Object [] {31, "url_21", 21},
+			new Object [] {29, "url_22", 11},
+			new Object [] {71, "url_23", 30},
+			new Object [] {85, "url_24", 48},
+			new Object [] {19, "url_25", 45},
+			new Object [] {69, "url_26", 9},
+			new Object [] {20, "url_27", 51},
+			new Object [] {33, "url_28", 46},
+			new Object [] {75, "url_29", 38},
+			new Object [] {96, "url_30", 51},
+			new Object [] {73, "url_31", 40},
+			new Object [] {67, "url_32", 16},
+			new Object [] {24, "url_33", 24},
+			new Object [] {27, "url_34", 35},
+			new Object [] {33, "url_35", 35},
+			new Object [] {7, "url_36", 22},
+			new Object [] {83, "url_37", 41},
+			new Object [] {23, "url_38", 49},
+			new Object [] {41, "url_39", 33},
+			new Object [] {66, "url_40", 38},
+			new Object [] {4, "url_41", 52},
+			new Object [] {34, "url_42", 4},
+			new Object [] {28, "url_43", 12},
+			new Object [] {14, "url_44", 14},
+			new Object [] {41, "url_45", 11},
+			new Object [] {48, "url_46", 37},
+			new Object [] {75, "url_47", 41},
+			new Object [] {78, "url_48", 3},
+			new Object [] {63, "url_49", 28}
 	};
 
 
 	public static final Object [][] VISITS = {
-			new Object [] {"url_2","2003-12-17"},
-			new Object [] {"url_9","2008-11-11"},
-			new Object [] {"url_14","2003-11-5"},
-			new Object [] {"url_46","2009-2-16"},
-			new Object [] {"url_14","2004-11-9"},
-			new Object [] {"url_36","2001-3-9"},
-			new Object [] {"url_35","2006-8-13"},
-			new Object [] {"url_22","2008-1-18"},
-			new Object [] {"url_36","2002-3-9"},
-			new Object [] {"url_13","2007-7-17"},
-			new Object [] {"url_23","2009-6-16"},
-			new Object [] {"url_16","2000-7-15"},
-			new Object [] {"url_41","2002-5-10"},
-			new Object [] {"url_6","2004-11-9"},
-			new Object [] {"url_5","2003-6-7"},
-			new Object [] {"url_22","2002-11-5"},
-			new Object [] {"url_11","2007-7-21"},
-			new Object [] {"url_38","2009-12-2"},
-			new Object [] {"url_6","2004-11-2"},
-			new Object [] {"url_46","2000-6-4"},
-			new Object [] {"url_34","2003-9-2"},
-			new Object [] {"url_31","2008-2-24"},
-			new Object [] {"url_0","2003-2-2"},
-			new Object [] {"url_47","2003-7-8"},
-			new Object [] {"url_49","2009-9-13"},
-			new Object [] {"url_11","2003-4-2"},
-			new Object [] {"url_20","2000-6-18"},
-			new Object [] {"url_38","2000-2-22"},
-			new Object [] {"url_44","2009-2-17"},
-			new Object [] {"url_26","2000-6-21"},
-			new Object [] {"url_13","2000-11-25"},
-			new Object [] {"url_47","2005-4-19"},
-			new Object [] {"url_46","2008-1-7"},
-			new Object [] {"url_33","2004-12-24"},
-			new Object [] {"url_32","2009-2-8"},
-			new Object [] {"url_26","2000-9-21"},
-			new Object [] {"url_9","2002-8-18"},
-			new Object [] {"url_38","2002-11-27"},
-			new Object [] {"url_37","2008-2-26"},
-			new Object [] {"url_1","2007-3-22"},
-			new Object [] {"url_37","2002-3-20"},
-			new Object [] {"url_27","2008-11-12"},
-			new Object [] {"url_30","2000-12-16"},
-			new Object [] {"url_48","2000-12-17"},
-			new Object [] {"url_46","2008-4-16"},
-			new Object [] {"url_29","2006-3-9"},
-			new Object [] {"url_0","2007-7-26"},
-			new Object [] {"url_46","2009-12-15"},
-			new Object [] {"url_34","2002-2-13"},
-			new Object [] {"url_24","2009-3-1"},
-			new Object [] {"url_43","2007-11-4"},
-			new Object [] {"url_3","2004-2-16"},
-			new Object [] {"url_26","2000-10-26"},
-			new Object [] {"url_42","2004-7-14"},
-			new Object [] {"url_13","2004-9-10"},
-			new Object [] {"url_21","2000-2-21"},
-			new Object [] {"url_9","2006-6-5"},
-			new Object [] {"url_46","2001-12-17"},
-			new Object [] {"url_24","2006-12-8"},
-			new Object [] {"url_25","2006-9-2"},
-			new Object [] {"url_37","2002-6-26"},
-			new Object [] {"url_18","2006-6-2"},
-			new Object [] {"url_46","2003-5-24"},
-			new Object [] {"url_32","2000-10-17"},
-			new Object [] {"url_45","2002-1-12"},
-			new Object [] {"url_12","2005-12-13"},
-			new Object [] {"url_49","2009-3-9"},
-			new Object [] {"url_31","2001-9-19"},
-			new Object [] {"url_22","2002-7-9"},
-			new Object [] {"url_27","2005-2-3"},
-			new Object [] {"url_43","2008-7-15"},
-			new Object [] {"url_20","2000-3-23"},
-			new Object [] {"url_25","2002-5-8"},
-			new Object [] {"url_41","2004-4-27"},
-			new Object [] {"url_17","2008-7-17"},
-			new Object [] {"url_26","2009-12-16"},
-			new Object [] {"url_34","2006-2-10"},
-			new Object [] {"url_8","2009-4-14"},
-			new Object [] {"url_16","2000-2-24"},
-			new Object [] {"url_2","2009-2-10"},
-			new Object [] {"url_35","2003-2-24"},
-			new Object [] {"url_34","2008-3-16"},
-			new Object [] {"url_27","2005-1-5"},
-			new Object [] {"url_8","2008-12-10"},
-			new Object [] {"url_38","2009-2-11"},
-			new Object [] {"url_38","2006-11-3"},
-			new Object [] {"url_47","2003-2-13"},
-			new Object [] {"url_8","2008-11-17"},
-			new Object [] {"url_26","2009-5-11"},
-			new Object [] {"url_12","2007-11-26"},
-			new Object [] {"url_10","2003-1-13"},
-			new Object [] {"url_8","2005-9-23"},
-			new Object [] {"url_42","2001-4-5"},
-			new Object [] {"url_30","2009-12-10"},
-			new Object [] {"url_2","2003-1-3"},
-			new Object [] {"url_2","2009-2-19"},
-			new Object [] {"url_7","2000-6-25"},
-			new Object [] {"url_15","2004-9-26"},
-			new Object [] {"url_25","2009-10-5"},
-			new Object [] {"url_23","2009-8-9"},
-			new Object [] {"url_27","2004-4-3"},
-			new Object [] {"url_37","2008-6-9"},
-			new Object [] {"url_9","2002-5-25"},
-			new Object [] {"url_43","2009-5-18"},
-			new Object [] {"url_21","2008-4-19"},
-			new Object [] {"url_12","2001-12-25"},
-			new Object [] {"url_16","2006-9-25"},
-			new Object [] {"url_27","2002-1-2"},
-			new Object [] {"url_2","2009-1-21"},
-			new Object [] {"url_31","2009-3-20"},
-			new Object [] {"url_42","2002-3-1"},
-			new Object [] {"url_31","2001-11-26"},
-			new Object [] {"url_20","2003-5-15"},
-			new Object [] {"url_32","2004-1-22"},
-			new Object [] {"url_28","2008-9-16"},
-			new Object [] {"url_27","2006-7-3"},
-			new Object [] {"url_11","2008-12-26"},
-			new Object [] {"url_15","2004-8-16"},
-			new Object [] {"url_34","2002-10-5"},
-			new Object [] {"url_44","2000-2-15"},
-			new Object [] {"url_9","2000-10-23"},
-			new Object [] {"url_45","2005-4-24"},
-			new Object [] {"url_0","2006-8-7"},
-			new Object [] {"url_48","2003-8-7"},
-			new Object [] {"url_8","2007-12-13"},
-			new Object [] {"url_42","2003-8-2"},
-			new Object [] {"url_25","2008-3-5"},
-			new Object [] {"url_3","2007-3-9"},
-			new Object [] {"url_49","2003-10-7"},
-			new Object [] {"url_18","2007-12-6"},
-			new Object [] {"url_3","2006-7-5"},
-			new Object [] {"url_27","2000-9-14"},
-			new Object [] {"url_42","2002-10-20"},
-			new Object [] {"url_44","2007-1-13"},
-			new Object [] {"url_6","2003-1-21"},
-			new Object [] {"url_40","2009-10-20"},
-			new Object [] {"url_28","2009-6-17"},
-			new Object [] {"url_22","2000-2-17"},
-			new Object [] {"url_3","2005-1-15"},
-			new Object [] {"url_9","2008-12-9"},
-			new Object [] {"url_9","2005-2-19"},
-			new Object [] {"url_28","2000-4-22"},
-			new Object [] {"url_44","2001-9-9"},
-			new Object [] {"url_43","2008-6-21"},
-			new Object [] {"url_39","2008-5-9"},
-			new Object [] {"url_15","2006-9-15"},
-			new Object [] {"url_23","2001-12-18"},
-			new Object [] {"url_14","2002-5-23"},
-			new Object [] {"url_11","2007-7-11"},
-			new Object [] {"url_34","2000-12-8"},
-			new Object [] {"url_47","2005-7-3"},
-			new Object [] {"url_38","2004-3-26"},
-			new Object [] {"url_19","2003-9-14"},
-			new Object [] {"url_24","2007-7-16"},
-			new Object [] {"url_40","2008-8-21"},
-			new Object [] {"url_17","2007-12-4"},
-			new Object [] {"url_25","2006-6-24"},
-			new Object [] {"url_2","2000-10-8"},
-			new Object [] {"url_12","2008-6-10"},
-			new Object [] {"url_11","2004-11-24"},
-			new Object [] {"url_13","2005-11-3"},
-			new Object [] {"url_43","2005-1-2"},
-			new Object [] {"url_14","2008-6-12"},
-			new Object [] {"url_43","2001-8-27"},
-			new Object [] {"url_45","2000-3-3"},
-			new Object [] {"url_0","2006-9-27"},
-			new Object [] {"url_22","2007-12-18"},
-			new Object [] {"url_25","2006-4-4"},
-			new Object [] {"url_32","2001-6-25"},
-			new Object [] {"url_6","2007-6-9"},
-			new Object [] {"url_8","2009-10-3"},
-			new Object [] {"url_15","2003-2-23"},
-			new Object [] {"url_37","2000-5-6"},
-			new Object [] {"url_27","2004-3-21"},
-			new Object [] {"url_17","2005-6-20"},
-			new Object [] {"url_2","2004-2-27"},
-			new Object [] {"url_36","2005-3-16"},
-			new Object [] {"url_1","2009-12-3"},
-			new Object [] {"url_9","2004-4-27"},
-			new Object [] {"url_18","2009-5-26"},
-			new Object [] {"url_31","2000-9-21"},
-			new Object [] {"url_12","2008-9-25"},
-			new Object [] {"url_2","2004-2-16"},
-			new Object [] {"url_28","2008-11-12"},
-			new Object [] {"url_28","2001-6-26"},
-			new Object [] {"url_12","2006-3-15"},
-			new Object [] {"url_0","2009-3-1"},
-			new Object [] {"url_36","2006-10-13"},
-			new Object [] {"url_15","2004-11-5"},
-			new Object [] {"url_32","2008-2-11"},
-			new Object [] {"url_19","2009-8-3"},
-			new Object [] {"url_2","2006-8-6"},
-			new Object [] {"url_11","2009-10-13"},
-			new Object [] {"url_21","2002-9-14"},
-			new Object [] {"url_18","2000-11-2"},
-			new Object [] {"url_35","2006-5-15"},
-			new Object [] {"url_11","2006-2-18"},
-			new Object [] {"url_0","2001-4-25"},
-			new Object [] {"url_14","2009-4-8"},
-			new Object [] {"url_16","2009-4-7"}
+			new Object [] {"url_2", "2003-12-17"},
+			new Object [] {"url_9", "2008-11-11"},
+			new Object [] {"url_14", "2003-11-5"},
+			new Object [] {"url_46", "2009-2-16"},
+			new Object [] {"url_14", "2004-11-9"},
+			new Object [] {"url_36", "2001-3-9"},
+			new Object [] {"url_35", "2006-8-13"},
+			new Object [] {"url_22", "2008-1-18"},
+			new Object [] {"url_36", "2002-3-9"},
+			new Object [] {"url_13", "2007-7-17"},
+			new Object [] {"url_23", "2009-6-16"},
+			new Object [] {"url_16", "2000-7-15"},
+			new Object [] {"url_41", "2002-5-10"},
+			new Object [] {"url_6", "2004-11-9"},
+			new Object [] {"url_5", "2003-6-7"},
+			new Object [] {"url_22", "2002-11-5"},
+			new Object [] {"url_11", "2007-7-21"},
+			new Object [] {"url_38", "2009-12-2"},
+			new Object [] {"url_6", "2004-11-2"},
+			new Object [] {"url_46", "2000-6-4"},
+			new Object [] {"url_34", "2003-9-2"},
+			new Object [] {"url_31", "2008-2-24"},
+			new Object [] {"url_0", "2003-2-2"},
+			new Object [] {"url_47", "2003-7-8"},
+			new Object [] {"url_49", "2009-9-13"},
+			new Object [] {"url_11", "2003-4-2"},
+			new Object [] {"url_20", "2000-6-18"},
+			new Object [] {"url_38", "2000-2-22"},
+			new Object [] {"url_44", "2009-2-17"},
+			new Object [] {"url_26", "2000-6-21"},
+			new Object [] {"url_13", "2000-11-25"},
+			new Object [] {"url_47", "2005-4-19"},
+			new Object [] {"url_46", "2008-1-7"},
+			new Object [] {"url_33", "2004-12-24"},
+			new Object [] {"url_32", "2009-2-8"},
+			new Object [] {"url_26", "2000-9-21"},
+			new Object [] {"url_9", "2002-8-18"},
+			new Object [] {"url_38", "2002-11-27"},
+			new Object [] {"url_37", "2008-2-26"},
+			new Object [] {"url_1", "2007-3-22"},
+			new Object [] {"url_37", "2002-3-20"},
+			new Object [] {"url_27", "2008-11-12"},
+			new Object [] {"url_30", "2000-12-16"},
+			new Object [] {"url_48", "2000-12-17"},
+			new Object [] {"url_46", "2008-4-16"},
+			new Object [] {"url_29", "2006-3-9"},
+			new Object [] {"url_0", "2007-7-26"},
+			new Object [] {"url_46", "2009-12-15"},
+			new Object [] {"url_34", "2002-2-13"},
+			new Object [] {"url_24", "2009-3-1"},
+			new Object [] {"url_43", "2007-11-4"},
+			new Object [] {"url_3", "2004-2-16"},
+			new Object [] {"url_26", "2000-10-26"},
+			new Object [] {"url_42", "2004-7-14"},
+			new Object [] {"url_13", "2004-9-10"},
+			new Object [] {"url_21", "2000-2-21"},
+			new Object [] {"url_9", "2006-6-5"},
+			new Object [] {"url_46", "2001-12-17"},
+			new Object [] {"url_24", "2006-12-8"},
+			new Object [] {"url_25", "2006-9-2"},
+			new Object [] {"url_37", "2002-6-26"},
+			new Object [] {"url_18", "2006-6-2"},
+			new Object [] {"url_46", "2003-5-24"},
+			new Object [] {"url_32", "2000-10-17"},
+			new Object [] {"url_45", "2002-1-12"},
+			new Object [] {"url_12", "2005-12-13"},
+			new Object [] {"url_49", "2009-3-9"},
+			new Object [] {"url_31", "2001-9-19"},
+			new Object [] {"url_22", "2002-7-9"},
+			new Object [] {"url_27", "2005-2-3"},
+			new Object [] {"url_43", "2008-7-15"},
+			new Object [] {"url_20", "2000-3-23"},
+			new Object [] {"url_25", "2002-5-8"},
+			new Object [] {"url_41", "2004-4-27"},
+			new Object [] {"url_17", "2008-7-17"},
+			new Object [] {"url_26", "2009-12-16"},
+			new Object [] {"url_34", "2006-2-10"},
+			new Object [] {"url_8", "2009-4-14"},
+			new Object [] {"url_16", "2000-2-24"},
+			new Object [] {"url_2", "2009-2-10"},
+			new Object [] {"url_35", "2003-2-24"},
+			new Object [] {"url_34", "2008-3-16"},
+			new Object [] {"url_27", "2005-1-5"},
+			new Object [] {"url_8", "2008-12-10"},
+			new Object [] {"url_38", "2009-2-11"},
+			new Object [] {"url_38", "2006-11-3"},
+			new Object [] {"url_47", "2003-2-13"},
+			new Object [] {"url_8", "2008-11-17"},
+			new Object [] {"url_26", "2009-5-11"},
+			new Object [] {"url_12", "2007-11-26"},
+			new Object [] {"url_10", "2003-1-13"},
+			new Object [] {"url_8", "2005-9-23"},
+			new Object [] {"url_42", "2001-4-5"},
+			new Object [] {"url_30", "2009-12-10"},
+			new Object [] {"url_2", "2003-1-3"},
+			new Object [] {"url_2", "2009-2-19"},
+			new Object [] {"url_7", "2000-6-25"},
+			new Object [] {"url_15", "2004-9-26"},
+			new Object [] {"url_25", "2009-10-5"},
+			new Object [] {"url_23", "2009-8-9"},
+			new Object [] {"url_27", "2004-4-3"},
+			new Object [] {"url_37", "2008-6-9"},
+			new Object [] {"url_9", "2002-5-25"},
+			new Object [] {"url_43", "2009-5-18"},
+			new Object [] {"url_21", "2008-4-19"},
+			new Object [] {"url_12", "2001-12-25"},
+			new Object [] {"url_16", "2006-9-25"},
+			new Object [] {"url_27", "2002-1-2"},
+			new Object [] {"url_2", "2009-1-21"},
+			new Object [] {"url_31", "2009-3-20"},
+			new Object [] {"url_42", "2002-3-1"},
+			new Object [] {"url_31", "2001-11-26"},
+			new Object [] {"url_20", "2003-5-15"},
+			new Object [] {"url_32", "2004-1-22"},
+			new Object [] {"url_28", "2008-9-16"},
+			new Object [] {"url_27", "2006-7-3"},
+			new Object [] {"url_11", "2008-12-26"},
+			new Object [] {"url_15", "2004-8-16"},
+			new Object [] {"url_34", "2002-10-5"},
+			new Object [] {"url_44", "2000-2-15"},
+			new Object [] {"url_9", "2000-10-23"},
+			new Object [] {"url_45", "2005-4-24"},
+			new Object [] {"url_0", "2006-8-7"},
+			new Object [] {"url_48", "2003-8-7"},
+			new Object [] {"url_8", "2007-12-13"},
+			new Object [] {"url_42", "2003-8-2"},
+			new Object [] {"url_25", "2008-3-5"},
+			new Object [] {"url_3", "2007-3-9"},
+			new Object [] {"url_49", "2003-10-7"},
+			new Object [] {"url_18", "2007-12-6"},
+			new Object [] {"url_3", "2006-7-5"},
+			new Object [] {"url_27", "2000-9-14"},
+			new Object [] {"url_42", "2002-10-20"},
+			new Object [] {"url_44", "2007-1-13"},
+			new Object [] {"url_6", "2003-1-21"},
+			new Object [] {"url_40", "2009-10-20"},
+			new Object [] {"url_28", "2009-6-17"},
+			new Object [] {"url_22", "2000-2-17"},
+			new Object [] {"url_3", "2005-1-15"},
+			new Object [] {"url_9", "2008-12-9"},
+			new Object [] {"url_9", "2005-2-19"},
+			new Object [] {"url_28", "2000-4-22"},
+			new Object [] {"url_44", "2001-9-9"},
+			new Object [] {"url_43", "2008-6-21"},
+			new Object [] {"url_39", "2008-5-9"},
+			new Object [] {"url_15", "2006-9-15"},
+			new Object [] {"url_23", "2001-12-18"},
+			new Object [] {"url_14", "2002-5-23"},
+			new Object [] {"url_11", "2007-7-11"},
+			new Object [] {"url_34", "2000-12-8"},
+			new Object [] {"url_47", "2005-7-3"},
+			new Object [] {"url_38", "2004-3-26"},
+			new Object [] {"url_19", "2003-9-14"},
+			new Object [] {"url_24", "2007-7-16"},
+			new Object [] {"url_40", "2008-8-21"},
+			new Object [] {"url_17", "2007-12-4"},
+			new Object [] {"url_25", "2006-6-24"},
+			new Object [] {"url_2", "2000-10-8"},
+			new Object [] {"url_12", "2008-6-10"},
+			new Object [] {"url_11", "2004-11-24"},
+			new Object [] {"url_13", "2005-11-3"},
+			new Object [] {"url_43", "2005-1-2"},
+			new Object [] {"url_14", "2008-6-12"},
+			new Object [] {"url_43", "2001-8-27"},
+			new Object [] {"url_45", "2000-3-3"},
+			new Object [] {"url_0", "2006-9-27"},
+			new Object [] {"url_22", "2007-12-18"},
+			new Object [] {"url_25", "2006-4-4"},
+			new Object [] {"url_32", "2001-6-25"},
+			new Object [] {"url_6", "2007-6-9"},
+			new Object [] {"url_8", "2009-10-3"},
+			new Object [] {"url_15", "2003-2-23"},
+			new Object [] {"url_37", "2000-5-6"},
+			new Object [] {"url_27", "2004-3-21"},
+			new Object [] {"url_17", "2005-6-20"},
+			new Object [] {"url_2", "2004-2-27"},
+			new Object [] {"url_36", "2005-3-16"},
+			new Object [] {"url_1", "2009-12-3"},
+			new Object [] {"url_9", "2004-4-27"},
+			new Object [] {"url_18", "2009-5-26"},
+			new Object [] {"url_31", "2000-9-21"},
+			new Object [] {"url_12", "2008-9-25"},
+			new Object [] {"url_2", "2004-2-16"},
+			new Object [] {"url_28", "2008-11-12"},
+			new Object [] {"url_28", "2001-6-26"},
+			new Object [] {"url_12", "2006-3-15"},
+			new Object [] {"url_0", "2009-3-1"},
+			new Object [] {"url_36", "2006-10-13"},
+			new Object [] {"url_15", "2004-11-5"},
+			new Object [] {"url_32", "2008-2-11"},
+			new Object [] {"url_19", "2009-8-3"},
+			new Object [] {"url_2", "2006-8-6"},
+			new Object [] {"url_11", "2009-10-13"},
+			new Object [] {"url_21", "2002-9-14"},
+			new Object [] {"url_18", "2000-11-2"},
+			new Object [] {"url_35", "2006-5-15"},
+			new Object [] {"url_11", "2006-2-18"},
+			new Object [] {"url_0", "2001-4-25"},
+			new Object [] {"url_14", "2009-4-8"},
+			new Object [] {"url_16", "2009-4-7"}
 	};
 
 	public static DataSet<Tuple2<String, String>> getDocumentDataSet(ExecutionEnvironment env) {

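For context, the Object[][] tables above are consumed by the getDocument/getRanks/getVisits helpers, which copy each row into a typed tuple and hand the result to Flink. A minimal sketch of that conversion, assuming the enclosing WebLogData class and its DOCUMENTS field (variable names here are illustrative):

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.tuple.Tuple2;

    public static DataSet<Tuple2<String, String>> getDocumentDataSet(ExecutionEnvironment env) {
        // Copy each {url, content} row into a typed tuple.
        List<Tuple2<String, String>> data = new ArrayList<>();
        for (Object[] doc : DOCUMENTS) {
            data.add(new Tuple2<>((String) doc[0], (String) doc[1]));
        }
        // fromCollection builds a DataSet from an in-memory collection.
        return env.fromCollection(data);
    }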
http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/util/WebLogDataGenerator.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/util/WebLogDataGenerator.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/util/WebLogDataGenerator.java
index e8dbe25..f68ece1 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/util/WebLogDataGenerator.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/util/WebLogDataGenerator.java
@@ -18,30 +18,30 @@
 
 package org.apache.flink.examples.java.relational.util;
 
+import org.apache.flink.examples.java.relational.WebLogAnalysis;
+
 import java.io.FileWriter;
 import java.io.IOException;
 import java.util.Calendar;
 import java.util.Random;
 
-import org.apache.flink.examples.java.relational.WebLogAnalysis;
-
 /**
- * Data generator for the {@link WebLogAnalysis} example program. 
+ * Data generator for the {@link WebLogAnalysis} example program.
  *
  */
 public class WebLogDataGenerator {
 
 	/**
 	 * Main method to generate data for the {@link WebLogAnalysis} example program.
-	 * <p>
-	 * The generator creates to files:
+	 *
+	 * <p>The generator creates the following files:
 	 * <ul>
 	 * <li><code>{tmp.dir}/documents</code> for the web documents
 	 * <li><code>{tmp.dir}/ranks</code> for the ranks of the web documents
 	 * <li><code>{tmp.dir}/visits</code> for the logged visits of web documents
-	 * </ul> 
-	 * 
-	 * @param args 
+	 * </ul>
+	 *
+	 * @param args
 	 * <ol>
 	 * <li>Int: Number of web documents
 	 * <li>Int: Number of visits
@@ -54,10 +54,10 @@ public class WebLogDataGenerator {
 			System.out.println("WebLogDataGenerator <numberOfDocuments> <numberOfVisits>");
 			System.exit(1);
 		}
-		
+
 		int noDocs = Integer.parseInt(args[0]);
 		int noVisits = Integer.parseInt(args[1]);
-		
+
 		String[] filterKWs = { "editors", "oscillations", "convection" };
 
 		String[] words = { "Lorem", "ipsum", "dolor", "sit", "amet",
@@ -68,7 +68,6 @@ public class WebLogDataGenerator {
 				"ullamcorper", "suscipit", "lobortis", "nisl", "ut", "aliquip",
 				"ex", "ea", "commodo" };
 
-		
 		final String outPath = System.getProperty("java.io.tmpdir");
 
 		System.out.println("Generating documents files...");
@@ -85,7 +84,7 @@ public class WebLogDataGenerator {
 	 * Generates the files for the documents relation. The entries apply the
 	 * following format: <br />
 	 * <code>URL | Content</code>
-	 * 
+	 *
 	 * @param noDocs
 	 *            Number of entries for the documents relation
 	 * @param filterKeyWords
@@ -127,7 +126,7 @@ public class WebLogDataGenerator {
 	 * Generates the files for the ranks relation. The ranks entries apply the
 	 * following format: <br />
 	 * <code>Rank | URL | Average Duration |\n</code>
-	 * 
+	 *
 	 * @param noDocs
 	 *            Number of entries in the documents relation
 	 * @param path
@@ -157,7 +156,7 @@ public class WebLogDataGenerator {
 	 * Generates the files for the visits relation. The visits entries apply the
 	 * following format:<br />
 	 * <code>IP Address | URL | Date (YYYY-MM-DD) | Misc. Data (e.g. User-Agent) |\n</code>
-	 * 
+	 *
 	 * @param noVisits
 	 *            Number of entries for the visits relation
 	 * @param noDocs

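The javadoc above pins down the generator's arguments and the three output relations; for reference, a usage sketch (the argument values are illustrative):

    // Run the generator with <numberOfDocuments> <numberOfVisits>:
    //   java org.apache.flink.examples.java.relational.util.WebLogDataGenerator 100 1000
    //
    // Files land in System.getProperty("java.io.tmpdir"), in the formats
    // documented above:
    //   {tmp.dir}/documents : URL | Content
    //   {tmp.dir}/ranks     : Rank | URL | Average Duration |
    //   {tmp.dir}/visits    : IP Address | URL | Date (YYYY-MM-DD) | Misc. Data |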
http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/wordcount/WordCount.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/wordcount/WordCount.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/wordcount/WordCount.java
index 8538a20..c494c6f 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/wordcount/WordCount.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/wordcount/WordCount.java
@@ -28,31 +28,28 @@ import org.apache.flink.util.Collector;
 
 /**
  * Implements the "WordCount" program that computes a simple word occurrence histogram
- * over text files. 
- * 
- * <p>
- * The input is a plain text file with lines separated by newline characters.
- * 
- * <p>
- * Usage: <code>WordCount --input &lt;path&gt; --output &lt;path&gt;</code><br>
+ * over text files.
+ *
+ * <p>The input is a plain text file with lines separated by newline characters.
+ *
+ * <p>Usage: <code>WordCount --input &lt;path&gt; --output &lt;path&gt;</code><br>
  * If no parameters are provided, the program is run with default data from {@link WordCountData}.
- * 
- * <p>
- * This example shows how to:
+ *
+ * <p>This example shows how to:
  * <ul>
  * <li>write a simple Flink program.
  * <li>use Tuple data types.
- * <li>write and use user-defined functions. 
+ * <li>write and use user-defined functions.
  * </ul>
- * 
+ *
  */
 @SuppressWarnings("serial")
 public class WordCount {
-	
+
 	// *************************************************************************
 	//     PROGRAM
 	// *************************************************************************
-	
+
 	public static void main(String[] args) throws Exception {
 
 		final ParameterTool params = ParameterTool.fromArgs(args);
@@ -75,7 +72,7 @@ public class WordCount {
 			text = WordCountData.getDefaultTextLineDataSet(env);
 		}
 
-		DataSet<Tuple2<String, Integer>> counts = 
+		DataSet<Tuple2<String, Integer>> counts =
 				// split up the lines in pairs (2-tuples) containing: (word,1)
 				text.flatMap(new Tokenizer())
 				// group by the tuple field "0" and sum up tuple field "1"
@@ -97,10 +94,10 @@ public class WordCount {
 	// *************************************************************************
 	//     USER FUNCTIONS
 	// *************************************************************************
-	
+
 	/**
 	 * Implements the string tokenizer that splits sentences into words as a user-defined
-	 * FlatMapFunction. The function takes a line (String) and splits it into 
+	 * FlatMapFunction. The function takes a line (String) and splits it into
 	 * multiple pairs in the form of "(word,1)" ({@code Tuple2<String, Integer>}).
 	 */
 	public static final class Tokenizer implements FlatMapFunction<String, Tuple2<String, Integer>> {
@@ -109,7 +106,7 @@ public class WordCount {
 		public void flatMap(String value, Collector<Tuple2<String, Integer>> out) {
 			// normalize and split the line
 			String[] tokens = value.toLowerCase().split("\\W+");
-			
+
 			// emit the pairs
 			for (String token : tokens) {
 				if (token.length() > 0) {

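A minimal sketch of the Tokenizer contract described in the javadoc above: it lowercases the line, splits on non-word characters, and emits one (word, 1) pair per token. ListCollector is Flink's list-backed Collector, used here only to capture the output; the input line is illustrative:

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.flink.api.common.functions.util.ListCollector;
    import org.apache.flink.api.java.tuple.Tuple2;
    import org.apache.flink.examples.java.wordcount.WordCount;

    public class TokenizerSketch {
        public static void main(String[] args) {
            List<Tuple2<String, Integer>> out = new ArrayList<>();
            // value.toLowerCase().split("\\W+") drives the pairs emitted below.
            new WordCount.Tokenizer().flatMap("To be, or not to be", new ListCollector<>(out));
            System.out.println(out); // [(to,1), (be,1), (or,1), (not,1), (to,1), (be,1)]
        }
    }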
http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/wordcount/WordCountPojo.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/wordcount/WordCountPojo.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/wordcount/WordCountPojo.java
index 1ad15d8..6494fbf 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/wordcount/WordCountPojo.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/wordcount/WordCountPojo.java
@@ -33,49 +33,49 @@ import org.apache.flink.util.Collector;
  */
 @SuppressWarnings("serial")
 public class WordCountPojo {
-	
+
 	/**
 	 * This is the POJO (Plain Old Java Object) that is being used
 	 * for all the operations.
 	 * As long as all fields are public or have a getter/setter, the system can handle them
 	 */
 	public static class Word {
-		
+
 		// fields
 		private String word;
 		private int frequency;
-		
+
 		// constructors
 		public Word() {}
-		
+
 		public Word(String word, int i) {
 			this.word = word;
 			this.frequency = i;
 		}
-		
+
 		// getters setters
 		public String getWord() {
 			return word;
 		}
-		
+
 		public void setWord(String word) {
 			this.word = word;
 		}
-		
+
 		public int getFrequency() {
 			return frequency;
 		}
-		
+
 		public void setFrequency(int frequency) {
 			this.frequency = frequency;
 		}
 
 		@Override
 		public String toString() {
-			return "Word="+word+" freq="+frequency;
+			return "Word=" + word + " freq=" + frequency;
 		}
 	}
-	
+
 	public static void main(String[] args) throws Exception {
 
 		final ParameterTool params = ParameterTool.fromArgs(args);
@@ -85,7 +85,7 @@ public class WordCountPojo {
 
 		// make parameters available in the web interface
 		env.getConfig().setGlobalJobParameters(params);
-		
+
 		// get input data
 		DataSet<String> text;
 		if (params.has("input")) {
@@ -98,7 +98,7 @@ public class WordCountPojo {
 			text = WordCountData.getDefaultTextLineDataSet(env);
 		}
 
-		DataSet<Word> counts = 
+		DataSet<Word> counts =
 			// split up the lines into Word objects (with frequency = 1)
 			text.flatMap(new Tokenizer())
 			// group by the field word and sum up the frequency
@@ -106,10 +106,10 @@ public class WordCountPojo {
 			.reduce(new ReduceFunction<Word>() {
 				@Override
 				public Word reduce(Word value1, Word value2) throws Exception {
-					return new Word(value1.word,value1.frequency + value2.frequency);
+					return new Word(value1.word, value1.frequency + value2.frequency);
 				}
 			});
-		
+
 		if (params.has("output")) {
 			counts.writeAsText(params.get("output"), WriteMode.OVERWRITE);
 			// execute program
@@ -120,14 +120,14 @@ public class WordCountPojo {
 		}
 
 	}
-	
+
 	// *************************************************************************
 	//     USER FUNCTIONS
 	// *************************************************************************
-	
+
 	/**
 	 * Implements the string tokenizer that splits sentences into words as a user-defined
-	 * FlatMapFunction. The function takes a line (String) and splits it into 
+	 * FlatMapFunction. The function takes a line (String) and splits it into
 	 * multiple Word objects.
 	 */
 	public static final class Tokenizer implements FlatMapFunction<String, Word> {
@@ -136,7 +136,7 @@ public class WordCountPojo {
 		public void flatMap(String value, Collector<Word> out) {
 			// normalize and split the line
 			String[] tokens = value.toLowerCase().split("\\W+");
-			
+
 			// emit the pairs
 			for (String token : tokens) {
 				if (token.length() > 0) {
@@ -145,5 +145,5 @@ public class WordCountPojo {
 			}
 		}
 	}
-	
+
 }

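Because Word follows the POJO rules called out above (no-arg constructor plus getters/setters for its private fields), the example can key on a field name instead of a tuple index. A short sketch, assuming an existing DataSet<Word> named words:

    // Field-expression key on the POJO; "word" resolves through the getter/setter pair.
    DataSet<WordCountPojo.Word> counts = words
        .groupBy("word")
        .reduce((w1, w2) ->
            new WordCountPojo.Word(w1.getWord(), w1.getFrequency() + w2.getFrequency()));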
http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/graph/EnumTriangles.scala
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/graph/EnumTriangles.scala b/flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/graph/EnumTriangles.scala
index 0a59699..32adeb5 100644
--- a/flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/graph/EnumTriangles.scala
+++ b/flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/graph/EnumTriangles.scala
@@ -18,16 +18,15 @@
 
 package org.apache.flink.examples.scala.graph
 
+import org.apache.flink.api.common.functions.GroupReduceFunction
+import org.apache.flink.api.common.operators.Order
 import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFields
 import org.apache.flink.api.java.utils.ParameterTool
-import org.apache.flink.api.scala._
-import scala.collection.JavaConverters._
-import org.apache.flink.api.scala.ExecutionEnvironment
-import org.apache.flink.api.common.functions.GroupReduceFunction
-import org.apache.flink.util.Collector
+import org.apache.flink.api.scala.{ExecutionEnvironment, _}
 import org.apache.flink.examples.java.graph.util.EnumTrianglesData
-import org.apache.flink.api.common.operators.Order
+import org.apache.flink.util.Collector
 
+import scala.collection.JavaConverters._
 import scala.collection.mutable
 
 /**

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/graph/PageRankBasic.scala
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/graph/PageRankBasic.scala b/flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/graph/PageRankBasic.scala
index 1f842d5..4692dbd 100644
--- a/flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/graph/PageRankBasic.scala
+++ b/flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/graph/PageRankBasic.scala
@@ -20,11 +20,10 @@ package org.apache.flink.examples.scala.graph
 import java.lang.Iterable
 
 import org.apache.flink.api.common.functions.GroupReduceFunction
+import org.apache.flink.api.java.aggregation.Aggregations.SUM
 import org.apache.flink.api.java.utils.ParameterTool
 import org.apache.flink.api.scala._
 import org.apache.flink.examples.java.graph.util.PageRankData
-import org.apache.flink.api.java.aggregation.Aggregations.SUM
-
 import org.apache.flink.util.Collector
 
 import scala.collection.JavaConverters._

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/relational/TPCHQuery10.scala
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/relational/TPCHQuery10.scala b/flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/relational/TPCHQuery10.scala
index f0d21f2..d510a36 100644
--- a/flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/relational/TPCHQuery10.scala
+++ b/flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/relational/TPCHQuery10.scala
@@ -18,11 +18,10 @@
 
 package org.apache.flink.examples.scala.relational
 
+import org.apache.flink.api.java.aggregation.Aggregations
 import org.apache.flink.api.java.utils.ParameterTool
 import org.apache.flink.api.scala._
 
-import org.apache.flink.api.java.aggregation.Aggregations
-
 /**
  * This program implements a modified version of the TPC-H query 10. 
  * 

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/relational/TPCHQuery3.scala
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/relational/TPCHQuery3.scala b/flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/relational/TPCHQuery3.scala
index d6d1846..3d6e9ab 100644
--- a/flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/relational/TPCHQuery3.scala
+++ b/flink-examples/flink-examples-batch/src/main/scala/org/apache/flink/examples/scala/relational/TPCHQuery3.scala
@@ -18,11 +18,10 @@
 
 package org.apache.flink.examples.scala.relational
 
+import org.apache.flink.api.java.aggregation.Aggregations
 import org.apache.flink.api.java.utils.ParameterTool
 import org.apache.flink.api.scala._
 
-import org.apache.flink.api.java.aggregation.Aggregations
-
 /**
  * This program implements a modified version of the TPC-H query 3. The
  * example demonstrates how to assign names to fields by extending the Tuple class.

http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/async/AsyncIOExample.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/async/AsyncIOExample.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/async/AsyncIOExample.java
index 9b1f78f..748cb82 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/async/AsyncIOExample.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/async/AsyncIOExample.java
@@ -30,9 +30,10 @@ import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.flink.streaming.api.functions.async.AsyncFunction;
 import org.apache.flink.streaming.api.functions.async.RichAsyncFunction;
-import org.apache.flink.streaming.api.functions.source.SourceFunction;
 import org.apache.flink.streaming.api.functions.async.collector.AsyncCollector;
+import org.apache.flink.streaming.api.functions.source.SourceFunction;
 import org.apache.flink.util.Collector;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -45,7 +46,7 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
 /**
- * Example to illustrates how to use {@link AsyncFunction}
+ * Example illustrating how to use {@link AsyncFunction}.
  */
 public class AsyncIOExample {
 
@@ -73,8 +74,9 @@ public class AsyncIOExample {
 
 		@Override
 		public void restoreState(List<Integer> state) throws Exception {
-			for (Integer i : state)
+			for (Integer i : state) {
 				this.start = i;
+			}
 		}
 
 		public SimpleSource(int maxNum) {
@@ -107,8 +109,8 @@ public class AsyncIOExample {
 	/**
 	 * A sample of {@link AsyncFunction} using a thread pool and executing worker threads
 	 * to simulate multiple async operations.
-	 * <p>
-	 * For the real use case in production environment, the thread pool may stay in the
+	 *
+	 * <p>For a real production use case, the thread pool may live inside the
 	 * async client.
 	 */
 	private static class SampleAsyncFunction extends RichAsyncFunction<Integer, String> {
@@ -139,7 +141,6 @@ public class AsyncIOExample {
 			this.shutdownWaitTS = shutdownWaitTS;
 		}
 
-
 		@Override
 		public void open(Configuration parameters) throws Exception {
 			super.open(parameters);

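For background on the javadoc change above: the example keeps the ExecutorService inside the AsyncFunction only to simulate asynchronous requests; a real client would own it. Stripped to its essentials, the pattern looks roughly like the following sketch against the 1.x AsyncCollector API from the imports above (illustrative, not the commit's code):

    import org.apache.flink.configuration.Configuration;
    import org.apache.flink.streaming.api.functions.async.RichAsyncFunction;
    import org.apache.flink.streaming.api.functions.async.collector.AsyncCollector;

    import java.util.Collections;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class PooledAsyncSketch extends RichAsyncFunction<Integer, String> {

        private transient ExecutorService executor;

        @Override
        public void open(Configuration parameters) throws Exception {
            super.open(parameters);
            // stands in for the async client a production job would hold
            executor = Executors.newFixedThreadPool(10);
        }

        @Override
        public void asyncInvoke(final Integer input, final AsyncCollector<String> collector) {
            executor.submit(new Runnable() {
                @Override
                public void run() {
                    // results are handed back on a pool thread; Flink correlates
                    // them with the input record through the collector
                    collector.collect(Collections.singletonList("key-" + input));
                }
            });
        }

        @Override
        public void close() throws Exception {
            super.close();
            executor.shutdown();
        }
    }
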
http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/iteration/IterateExample.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/iteration/IterateExample.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/iteration/IterateExample.java
index 5859ad4..d123615 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/iteration/IterateExample.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/iteration/IterateExample.java
@@ -65,7 +65,7 @@ public class IterateExample {
 		// continuous flushing of the output buffers (lowest latency)
 		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment()
 				.setBufferTimeout(1);
-		
+
 		// make parameters available in the web interface
 		env.getConfig().setGlobalJobParameters(params);
 
@@ -115,7 +115,7 @@ public class IterateExample {
 	// *************************************************************************
 
 	/**
-	 * Generate BOUND number of random integer pairs from the range from 0 to BOUND/2
+	 * Generates BOUND random integer pairs in the range 0 to BOUND/2.
 	 */
 	private static class RandomFibonacciSource implements SourceFunction<Tuple2<Integer, Integer>> {
 		private static final long serialVersionUID = 1L;
@@ -145,7 +145,7 @@ public class IterateExample {
 	}
 
 	/**
-	 * Generate random integer pairs from the range from 0 to BOUND/2
+	 * Generates random integer pairs in the range 0 to BOUND/2.
 	 */
 	private static class FibonacciInputMap implements MapFunction<String, Tuple2<Integer, Integer>> {
 		private static final long serialVersionUID = 1L;
@@ -159,8 +159,8 @@ public class IterateExample {
 	}
 
 	/**
-	 * Map the inputs so that the next Fibonacci numbers can be calculated while preserving the original input tuple A
-	 * counter is attached to the tuple and incremented in every iteration step
+	 * Map the inputs so that the next Fibonacci numbers can be calculated while preserving the original input tuple.
+	 * A counter is attached to the tuple and incremented in every iteration step.
 	 */
 	public static class InputMap implements MapFunction<Tuple2<Integer, Integer>, Tuple5<Integer, Integer, Integer,
 			Integer, Integer>> {
@@ -174,7 +174,7 @@ public class IterateExample {
 	}
 
 	/**
-	 * Iteration step function that calculates the next Fibonacci number
+	 * Iteration step function that calculates the next Fibonacci number.
 	 */
 	public static class Step implements
 			MapFunction<Tuple5<Integer, Integer, Integer, Integer, Integer>, Tuple5<Integer, Integer, Integer,
@@ -207,7 +207,7 @@ public class IterateExample {
 	}
 
 	/**
-	 * Giving back the input pair and the counter
+	 * Emits the input pair together with the counter.
 	 */
 	public static class OutputMap implements MapFunction<Tuple5<Integer, Integer, Integer, Integer, Integer>,
 			Tuple2<Tuple2<Integer, Integer>, Integer>> {

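For orientation, the map functions touched in this file are the stages of a streaming iteration; the wiring between them is not visible in these hunks. Roughly, it follows the iterate/closeWith pattern sketched below, assuming inputStream is a DataStream<Tuple2<Integer, Integer>> and MySelector is an OutputSelector that routes tuples to either "iterate" or "output":

    // Sketch of the feedback loop, not the example's exact code.
    IterativeStream<Tuple5<Integer, Integer, Integer, Integer, Integer>> it =
            inputStream.map(new InputMap())
                    .iterate(5000L);                // max wait time for feedback data

    SplitStream<Tuple5<Integer, Integer, Integer, Integer, Integer>> step =
            it.map(new Step())                      // next Fibonacci pair, counter + 1
                    .split(new MySelector());       // decide: keep iterating or emit

    it.closeWith(step.select("iterate"));           // feed tuples back into the loop

    DataStream<Tuple2<Tuple2<Integer, Integer>, Integer>> result =
            step.select("output").map(new OutputMap());
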
http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/iteration/util/IterateExampleData.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/iteration/util/IterateExampleData.java b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/iteration/util/IterateExampleData.java
index 0077459..cfe5e73 100644
--- a/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/iteration/util/IterateExampleData.java
+++ b/flink-examples/flink-examples-streaming/src/main/java/org/apache/flink/streaming/examples/iteration/util/IterateExampleData.java
@@ -17,6 +17,9 @@
 
 package org.apache.flink.streaming.examples.iteration.util;
 
+/**
+ * Data for IterateExampleITCase.
+ */
 public class IterateExampleData {
 	public static final String INPUT_PAIRS = "(1,40)\n" + "(29,38)\n" + "(11,15)\n" + "(17,39)\n" + "(24,41)\n" +
 			"(7,33)\n" + "(20,2)\n" + "(11,5)\n" + "(3,16)\n" + "(23,36)\n" + "(15,23)\n" + "(28,13)\n" + "(1,1)\n" +


[3/7] flink git commit: [FLINK-6707] [examples] Activate strict checkstyle for flink-examples

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/TPCHQuery10.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/TPCHQuery10.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/TPCHQuery10.java
index 14fbc34..c585e82 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/TPCHQuery10.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/TPCHQuery10.java
@@ -20,67 +20,60 @@ package org.apache.flink.examples.java.relational;
 
 import org.apache.flink.api.common.functions.FilterFunction;
 import org.apache.flink.api.common.functions.MapFunction;
+import org.apache.flink.api.java.DataSet;
+import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.api.java.aggregation.Aggregations;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.api.java.tuple.Tuple3;
 import org.apache.flink.api.java.tuple.Tuple4;
 import org.apache.flink.api.java.tuple.Tuple5;
 import org.apache.flink.api.java.tuple.Tuple6;
-
-import org.apache.flink.api.java.DataSet;
-import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.api.java.utils.ParameterTool;
 
 /**
  * This program implements a modified version of the TPC-H query 10.
  * The original query can be found at
  * <a href="http://www.tpc.org/tpch/spec/tpch2.16.0.pdf">http://www.tpc.org/tpch/spec/tpch2.16.0.pdf</a> (page 45).
- * 
- * <p>
- * This program implements the following SQL equivalent:
- * 
- * <p>
- * <pre>{@code
- * SELECT 
+ *
+ * <p>This program implements the following SQL equivalent:
+ *
+ * <p><pre>{@code
+ * SELECT
  *        c_custkey,
- *        c_name, 
+ *        c_name,
  *        c_address,
- *        n_name, 
+ *        n_name,
 *        c_acctbal,
- *        SUM(l_extendedprice * (1 - l_discount)) AS revenue,  
- * FROM   
- *        customer, 
- *        orders, 
- *        lineitem, 
- *        nation 
- * WHERE 
- *        c_custkey = o_custkey 
- *        AND l_orderkey = o_orderkey 
- *        AND YEAR(o_orderdate) > '1990' 
- *        AND l_returnflag = 'R' 
- *        AND c_nationkey = n_nationkey 
- * GROUP BY 
- *        c_custkey, 
- *        c_name, 
- *        c_acctbal, 
- *        n_name, 
+ *        SUM(l_extendedprice * (1 - l_discount)) AS revenue
+ * FROM
+ *        customer,
+ *        orders,
+ *        lineitem,
+ *        nation
+ * WHERE
+ *        c_custkey = o_custkey
+ *        AND l_orderkey = o_orderkey
+ *        AND YEAR(o_orderdate) > '1990'
+ *        AND l_returnflag = 'R'
+ *        AND c_nationkey = n_nationkey
+ * GROUP BY
+ *        c_custkey,
+ *        c_name,
+ *        c_acctbal,
+ *        n_name,
  *        c_address
  * }</pre>
- *        
- * <p>
- * Compared to the original TPC-H query this version does not print 
+ *
+ * <p>Compared to the original TPC-H query this version does not print
  * c_phone and c_comment, only filters by years greater than 1990 instead of
  * a period of 3 months, and does not sort the result by revenue.
- * 
- * <p>
- * Input files are plain text CSV files using the pipe character ('|') as field separator 
+ *
+ * <p>Input files are plain text CSV files using the pipe character ('|') as field separator
  * as generated by the TPC-H data generator which is available at <a href="http://www.tpc.org/tpch/">http://www.tpc.org/tpch/</a>.
- * 
- * <p>
- * Usage: <code>TPCHQuery10 --customer &lt;path&gt; --orders &lt;path&gt; --lineitem&lt;path&gt; --nation &lt;path&gt; --output &lt;path&gt;</code><br>
- *  
- * <p>
- * This example shows how to use:
+ *
+ * <p>Usage: <code>TPCHQuery10 --customer &lt;path&gt; --orders &lt;path&gt; --lineitem &lt;path&gt; --nation &lt;path&gt; --output &lt;path&gt;</code><br>
+ *
+ * <p>This example shows how to use:
  * <ul>
  * <li> tuple data types
  * <li> inline-defined functions
@@ -90,11 +83,11 @@ import org.apache.flink.api.java.utils.ParameterTool;
  */
 @SuppressWarnings("serial")
 public class TPCHQuery10 {
-	
+
 	// *************************************************************************
 	//     PROGRAM
 	// *************************************************************************
-	
+
 	public static void main(String[] args) throws Exception {
 
 		final ParameterTool params = ParameterTool.fromArgs(args);
@@ -109,7 +102,7 @@ public class TPCHQuery10 {
 			return;
 		}
 
-		// get customer data set: (custkey, name, address, nationkey, acctbal) 
+		// get customer data set: (custkey, name, address, nationkey, acctbal)
 		DataSet<Tuple5<Integer, String, String, Integer, Double>> customers =
 			getCustomerDataSet(env, params.get("customer"));
 		// get orders data set: (orderkey, custkey, orderdate)
@@ -126,17 +119,17 @@ public class TPCHQuery10 {
 		DataSet<Tuple2<Integer, Integer>> ordersFilteredByYear =
 				// filter by year
 				orders.filter(
-								new FilterFunction<Tuple3<Integer,Integer, String>>() {
+								new FilterFunction<Tuple3<Integer, Integer, String>>() {
 									@Override
 									public boolean filter(Tuple3<Integer, Integer, String> o) {
 										return Integer.parseInt(o.f2.substring(0, 4)) > 1990;
 									}
 								})
 				// project fields out that are no longer required
-				.project(0,1);
+				.project(0, 1);
 
 		// lineitems filtered by flag: (orderkey, revenue)
-		DataSet<Tuple2<Integer, Double>> lineitemsFilteredByFlag = 
+		DataSet<Tuple2<Integer, Double>> lineitemsFilteredByFlag =
 				// filter by flag
 				lineitems.filter(new FilterFunction<Tuple4<Integer, Double, Double, String>>() {
 										@Override
@@ -154,24 +147,24 @@ public class TPCHQuery10 {
 					});
 
 		// join orders with lineitems: (custkey, revenue)
-		DataSet<Tuple2<Integer, Double>> revenueByCustomer = 
+		DataSet<Tuple2<Integer, Double>> revenueByCustomer =
 				ordersFilteredByYear.joinWithHuge(lineitemsFilteredByFlag)
 									.where(0).equalTo(0)
 									.projectFirst(1).projectSecond(1);
-		
+
 		revenueByCustomer = revenueByCustomer.groupBy(0).aggregate(Aggregations.SUM, 1);
 
 		// join customer with nation (custkey, name, address, nationname, acctbal)
 		DataSet<Tuple5<Integer, String, String, String, Double>> customerWithNation = customers
 						.joinWithTiny(nations)
 						.where(3).equalTo(0)
-						.projectFirst(0,1,2).projectSecond(1).projectFirst(4);
+						.projectFirst(0, 1, 2).projectSecond(1).projectFirst(4);
 
 		// join customer (with nation) with revenue (custkey, name, address, nationname, acctbal, revenue)
-		DataSet<Tuple6<Integer, String, String, String, Double, Double>> result = 
+		DataSet<Tuple6<Integer, String, String, String, Double, Double>> result =
 				customerWithNation.join(revenueByCustomer)
 				.where(0).equalTo(0)
-				.projectFirst(0,1,2,3,4).projectSecond(1);
+				.projectFirst(0, 1, 2, 3, 4).projectSecond(1);
 
 		// emit result
 		if (params.has("output")) {
@@ -182,20 +175,20 @@ public class TPCHQuery10 {
 			System.out.println("Printing result to stdout. Use --output to specify output path.");
 			result.print();
 		}
-		
+
 	}
-	
+
 	// *************************************************************************
 	//     UTIL METHODS
 	// *************************************************************************
-	
+
 	private static DataSet<Tuple5<Integer, String, String, Integer, Double>> getCustomerDataSet(ExecutionEnvironment env, String customerPath) {
 		return env.readCsvFile(customerPath)
 					.fieldDelimiter("|")
 					.includeFields("11110100")
 					.types(Integer.class, String.class, String.class, Integer.class, Double.class);
 	}
-	
+
 	private static DataSet<Tuple3<Integer, Integer, String>> getOrdersDataSet(ExecutionEnvironment env, String ordersPath) {
 		return env.readCsvFile(ordersPath)
 					.fieldDelimiter("|")
@@ -209,7 +202,7 @@ public class TPCHQuery10 {
 					.includeFields("1000011010000000")
 					.types(Integer.class, Double.class, Double.class, String.class);
 	}
-	
+
 	private static DataSet<Tuple2<Integer, String>> getNationsDataSet(ExecutionEnvironment env, String nationPath) {
 		return env.readCsvFile(nationPath)
 					.fieldDelimiter("|")

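A note on the util methods in this file: the includeFields strings are positional bit masks over the pipe-separated TPC-H columns. Each '1' keeps a column, each '0' drops one, and the kept columns must line up, in order, with the types(...) call. In miniature, with a hypothetical four-column file "id|name|comment|score" and an ExecutionEnvironment env:

    // The mask keeps columns 1, 2 and 4, so exactly three types are declared.
    DataSet<Tuple3<Integer, String, Double>> rows = env.readCsvFile("/path/to/data.csv")
            .fieldDelimiter("|")
            .includeFields("1101")
            .types(Integer.class, String.class, Double.class);
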
http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/TPCHQuery3.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/TPCHQuery3.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/TPCHQuery3.java
index c849764..f416f30 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/TPCHQuery3.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/TPCHQuery3.java
@@ -18,11 +18,6 @@
 
 package org.apache.flink.examples.java.relational;
 
-import java.text.DateFormat;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
 import org.apache.flink.api.common.functions.FilterFunction;
 import org.apache.flink.api.common.functions.JoinFunction;
 import org.apache.flink.api.java.DataSet;
@@ -32,50 +27,49 @@ import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.api.java.tuple.Tuple4;
 import org.apache.flink.api.java.utils.ParameterTool;
 
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
 /**
  * This program implements a modified version of the TPC-H query 3. The
  * example demonstrates how to assign names to fields by extending the Tuple class.
  * The original query can be found at
  * <a href="http://www.tpc.org/tpch/spec/tpch2.16.0.pdf">http://www.tpc.org/tpch/spec/tpch2.16.0.pdf</a> (page 29).
  *
- * <p>
- * This program implements the following SQL equivalent:
+ * <p>This program implements the following SQL equivalent:
  *
- * <p>
- * <pre>{@code
- * SELECT 
- *      l_orderkey, 
+ * <p><pre>{@code
+ * SELECT
+ *      l_orderkey,
  *      SUM(l_extendedprice*(1-l_discount)) AS revenue,
- *      o_orderdate, 
- *      o_shippriority 
- * FROM customer, 
- *      orders, 
- *      lineitem 
+ *      o_orderdate,
+ *      o_shippriority
+ * FROM customer,
+ *      orders,
+ *      lineitem
  * WHERE
- *      c_mktsegment = '[SEGMENT]' 
+ *      c_mktsegment = '[SEGMENT]'
  *      AND c_custkey = o_custkey
  *      AND l_orderkey = o_orderkey
  *      AND o_orderdate < date '[DATE]'
  *      AND l_shipdate > date '[DATE]'
  * GROUP BY
- *      l_orderkey, 
- *      o_orderdate, 
+ *      l_orderkey,
+ *      o_orderdate,
  *      o_shippriority;
  * }</pre>
  *
- * <p>
- * Compared to the original TPC-H query this version does not sort the result by revenue
+ * <p>Compared to the original TPC-H query this version does not sort the result by revenue
  * and orderdate.
  *
- * <p>
- * Input files are plain text CSV files using the pipe character ('|') as field separator 
+ * <p>Input files are plain text CSV files using the pipe character ('|') as field separator
  * as generated by the TPC-H data generator which is available at <a href="http://www.tpc.org/tpch/">http://www.tpc.org/tpch/</a>.
  *
- *  <p>
- * Usage: <code>TPCHQuery3 --lineitem&lt;path&gt; --customer &lt;path&gt; --orders&lt;path&gt; --output &lt;path&gt;</code><br>
- *  
- * <p>
- * This example shows how to use:
+ * <p>Usage: <code>TPCHQuery3 --lineitem &lt;path&gt; --customer &lt;path&gt; --orders &lt;path&gt; --output &lt;path&gt;</code><br>
+ *
+ * <p>This example shows how to use:
  * <ul>
  * <li> custom data type derived from tuple data types
  * <li> inline-defined functions
@@ -88,9 +82,9 @@ public class TPCHQuery3 {
 	// *************************************************************************
 	//     PROGRAM
 	// *************************************************************************
-	
+
 	public static void main(String[] args) throws Exception {
-		
+
 		final ParameterTool params = ParameterTool.fromArgs(args);
 
 		if (!params.has("lineitem") && !params.has("customer") && !params.has("orders")) {
@@ -109,7 +103,7 @@ public class TPCHQuery3 {
 		DataSet<Lineitem> lineitems = getLineitemDataSet(env, params.get("lineitem"));
 		DataSet<Order> orders = getOrdersDataSet(env, params.get("orders"));
 		DataSet<Customer> customers = getCustomerDataSet(env, params.get("customer"));
-		
+
 		// Filter market segment "AUTOMOBILE"
 		customers = customers.filter(
 								new FilterFunction<Customer>() {
@@ -124,19 +118,19 @@ public class TPCHQuery3 {
 							new FilterFunction<Order>() {
 								private final DateFormat format = new SimpleDateFormat("yyyy-MM-dd");
 								private final Date date = format.parse("1995-03-12");
-								
+
 								@Override
 								public boolean filter(Order o) throws ParseException {
 									return format.parse(o.getOrderdate()).before(date);
 								}
 							});
-		
+
 		// Filter all Lineitems with l_shipdate > 12.03.1995
 		lineitems = lineitems.filter(
 								new FilterFunction<Lineitem>() {
 									private final DateFormat format = new SimpleDateFormat("yyyy-MM-dd");
 									private final Date date = format.parse("1995-03-12");
-									
+
 									@Override
 									public boolean filter(Lineitem l) throws ParseException {
 										return format.parse(l.getShipdate()).after(date);
@@ -144,7 +138,7 @@ public class TPCHQuery3 {
 								});
 
 		// Join customers with orders and package them into a ShippingPriorityItem
-		DataSet<ShippingPriorityItem> customerWithOrders = 
+		DataSet<ShippingPriorityItem> customerWithOrders =
 				customers.join(orders).where(0).equalTo(1)
 							.with(
 								new JoinFunction<Customer, Order, ShippingPriorityItem>() {
@@ -154,9 +148,9 @@ public class TPCHQuery3 {
 												o.getShippriority());
 									}
 								});
-		
+
 		// Join the last join result with Lineitems
-		DataSet<ShippingPriorityItem> result = 
+		DataSet<ShippingPriorityItem> result =
 				customerWithOrders.join(lineitems).where(0).equalTo(0)
 									.with(
 											new JoinFunction<ShippingPriorityItem, Lineitem, ShippingPriorityItem>() {
@@ -169,7 +163,7 @@ public class TPCHQuery3 {
 								// Group by l_orderkey, o_orderdate and o_shippriority and compute revenue sum
 								.groupBy(0, 2, 3)
 								.aggregate(Aggregations.SUM, 1);
-		
+
 		// emit result
 		if (params.has("output")) {
 			result.writeAsCsv(params.get("output"), "\n", "|");
@@ -185,68 +179,111 @@ public class TPCHQuery3 {
 	// *************************************************************************
 	//     DATA TYPES
 	// *************************************************************************
-	
-	public static class Lineitem extends Tuple4<Long, Double, Double, String> {
 
-		public Long getOrderkey() { return this.f0; }
-		public Double getDiscount() { return this.f2; }
-		public Double getExtendedprice() { return this.f1; }
-		public String getShipdate() { return this.f3; }
+	private static class Lineitem extends Tuple4<Long, Double, Double, String> {
+
+		public Long getOrderkey() {
+			return this.f0;
+		}
+
+		public Double getDiscount() {
+			return this.f2;
+		}
+
+		public Double getExtendedprice() {
+			return this.f1;
+		}
+
+		public String getShipdate() {
+			return this.f3;
+		}
 	}
 
-	public static class Customer extends Tuple2<Long, String> {
-		
-		public Long getCustKey() { return this.f0; }
-		public String getMktsegment() { return this.f1; }
+	private static class Customer extends Tuple2<Long, String> {
+
+		public Long getCustKey() {
+			return this.f0;
+		}
+
+		public String getMktsegment() {
+			return this.f1;
+		}
 	}
 
-	public static class Order extends Tuple4<Long, Long, String, Long> {
-		
-		public Long getOrderKey() { return this.f0; }
-		public Long getCustKey() { return this.f1; }
-		public String getOrderdate() { return this.f2; }
-		public Long getShippriority() { return this.f3; }
+	private static class Order extends Tuple4<Long, Long, String, Long> {
+
+		public Long getOrderKey() {
+			return this.f0;
+		}
+
+		public Long getCustKey() {
+			return this.f1;
+		}
+
+		public String getOrderdate() {
+			return this.f2;
+		}
+
+		public Long getShippriority() {
+			return this.f3;
+		}
 	}
 
-	public static class ShippingPriorityItem extends Tuple4<Long, Double, String, Long> {
+	private static class ShippingPriorityItem extends Tuple4<Long, Double, String, Long> {
+
+		public ShippingPriorityItem() {}
+
+		public ShippingPriorityItem(Long orderkey, Double revenue,
+				String orderdate, Long shippriority) {
+			this.f0 = orderkey;
+			this.f1 = revenue;
+			this.f2 = orderdate;
+			this.f3 = shippriority;
+		}
+
+		public Long getOrderkey() {
+			return this.f0;
+		}
 
-		public ShippingPriorityItem() { }
+		public void setOrderkey(Long orderkey) {
+			this.f0 = orderkey;
+		}
 
-		public ShippingPriorityItem(Long o_orderkey, Double revenue,
-				String o_orderdate, Long o_shippriority) {
-			this.f0 = o_orderkey;
+		public Double getRevenue() {
+			return this.f1;
+		}
+
+		public void setRevenue(Double revenue) {
 			this.f1 = revenue;
-			this.f2 = o_orderdate;
-			this.f3 = o_shippriority;
 		}
-		
-		public Long getOrderkey() { return this.f0; }
-		public void setOrderkey(Long orderkey) { this.f0 = orderkey; }
-		public Double getRevenue() { return this.f1; }
-		public void setRevenue(Double revenue) { this.f1 = revenue; }
-		
-		public String getOrderdate() { return this.f2; }
-		public Long getShippriority() { return this.f3; }
+
+		public String getOrderdate() {
+			return this.f2;
+		}
+
+		public Long getShippriority() {
+			return this.f3;
+		}
 	}
-	
+
 	// *************************************************************************
 	//     UTIL METHODS
 	// *************************************************************************
-	
+
 	private static DataSet<Lineitem> getLineitemDataSet(ExecutionEnvironment env, String lineitemPath) {
 		return env.readCsvFile(lineitemPath)
 					.fieldDelimiter("|")
 					.includeFields("1000011000100000")
 					.tupleType(Lineitem.class);
 	}
-	
+
 	private static DataSet<Customer> getCustomerDataSet(ExecutionEnvironment env, String customerPath) {
 		return env.readCsvFile(customerPath)
 					.fieldDelimiter("|")
 					.includeFields("10000010")
 					.tupleType(Customer.class);
 	}
-	
+
 	private static DataSet<Order> getOrdersDataSet(ExecutionEnvironment env, String ordersPath) {
 		return env.readCsvFile(ordersPath)
 					.fieldDelimiter("|")

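The data types reworked above show the pattern this example advertises: extending TupleN gives positional fields readable accessors while keeping Flink's tuple serialization, and such a type plugs straight into tupleType(...). A minimal hypothetical version:

    // Hypothetical named tuple; the implicit no-argument constructor suffices.
    public static class Rating extends Tuple2<String, Integer> {

        public String getUrl() {
            return this.f0;
        }

        public Integer getStars() {
            return this.f1;
        }
    }

    // Read two pipe-separated columns directly into Rating instances.
    DataSet<Rating> ratings = env.readCsvFile("/path/to/ratings.csv")
            .fieldDelimiter("|")
            .tupleType(Rating.class);
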
http://git-wip-us.apache.org/repos/asf/flink/blob/789ed8a8/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/WebLogAnalysis.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/WebLogAnalysis.java b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/WebLogAnalysis.java
index 5c8fac5..579d1ac 100644
--- a/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/WebLogAnalysis.java
+++ b/flink-examples/flink-examples-batch/src/main/java/org/apache/flink/examples/java/relational/WebLogAnalysis.java
@@ -31,29 +31,28 @@ import org.apache.flink.examples.java.relational.util.WebLogData;
 import org.apache.flink.util.Collector;
 
 /**
- * This program processes web logs and relational data. 
+ * This program processes web logs and relational data.
  * It implements the following relational query:
  *
  * <pre>{@code
- * SELECT 
- *       r.pageURL, 
- *       r.pageRank, 
+ * SELECT
+ *       r.pageURL,
+ *       r.pageRank,
  *       r.avgDuration
  * FROM documents d JOIN rankings r
  *                  ON d.url = r.url
- * WHERE CONTAINS(d.text, [keywords]) 
- *       AND r.rank > [rank] 
- *       AND NOT EXISTS 
+ * WHERE CONTAINS(d.text, [keywords])
+ *       AND r.rank > [rank]
+ *       AND NOT EXISTS
  *           (
  *              SELECT * FROM Visits v
- *              WHERE v.destUrl = d.url 
+ *              WHERE v.destUrl = d.url
  *                    AND v.visitDate < [date]
  *           );
  * }</pre>
  *
- * <p>
- * Input files are plain text CSV files using the pipe character ('|') as field separator.
- * The tables referenced in the query can be generated using the {@link org.apache.flink.examples.java.relational.util.WebLogDataGenerator} and 
+ * <p>Input files are plain text CSV files using the pipe character ('|') as field separator.
+ * The tables referenced in the query can be generated using the {@link org.apache.flink.examples.java.relational.util.WebLogDataGenerator} and
 * have the following schemas:
  * <pre>{@code
  * CREATE TABLE Documents (
@@ -76,29 +75,26 @@ import org.apache.flink.util.Collector;
  *                searchWord VARCHAR(32),
  *                duration INT );
  * }</pre>
- * 
- * <p>
- * Usage: <code>WebLogAnalysis --documents &lt;path&gt; --ranks &lt;path&gt; --visits &lt;path&gt; --result &lt;path&gt;</code><br>
+ *
+ * <p>Usage: <code>WebLogAnalysis --documents &lt;path&gt; --ranks &lt;path&gt; --visits &lt;path&gt; --result &lt;path&gt;</code><br>
  * If no parameters are provided, the program is run with default data from {@link WebLogData}.
- * 
- * <p>
- * This example shows how to use:
+ *
+ * <p>This example shows how to use:
  * <ul>
  * <li> tuple data types
  * <li> projection and join projection
  * <li> the CoGroup transformation for an anti-join
  * </ul>
- * 
  */
 @SuppressWarnings("serial")
 public class WebLogAnalysis {
-	
+
 	// *************************************************************************
 	//     PROGRAM
 	// *************************************************************************
-	
+
 	public static void main(String[] args) throws Exception {
-		
+
 		final ParameterTool params = ParameterTool.fromArgs(args);
 
 		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
@@ -109,7 +105,7 @@ public class WebLogAnalysis {
 		DataSet<Tuple2<String, String>> documents = getDocumentsDataSet(env, params);
 		DataSet<Tuple3<Integer, String, Integer>> ranks = getRanksDataSet(env, params);
 		DataSet<Tuple2<String, String>> visits = getVisitsDataSet(env, params);
-		
+
 		// Retain documents with keywords
 		DataSet<Tuple1<String>> filterDocs = documents
 				.filter(new FilterDocByKeyWords())
@@ -125,19 +121,19 @@ public class WebLogAnalysis {
 				.project(0);
 
 		// Join the filtered documents and ranks, i.e., get all URLs with min rank and keywords
-		DataSet<Tuple3<Integer, String, Integer>> joinDocsRanks = 
+		DataSet<Tuple3<Integer, String, Integer>> joinDocsRanks =
 				filterDocs.join(filterRanks)
 							.where(0).equalTo(1)
-							.projectSecond(0,1,2);
+							.projectSecond(0, 1, 2);
 
 		// Anti-join urls with visits, i.e., retain all URLs which have NOT been visited in a certain time
-		DataSet<Tuple3<Integer, String, Integer>> result = 
+		DataSet<Tuple3<Integer, String, Integer>> result =
 				joinDocsRanks.coGroup(filterVisits)
 								.where(1).equalTo(0)
 								.with(new AntiJoinVisits());
 
 		// emit result
-		if(params.has("output")) {
+		if (params.has("output")) {
 			result.writeAsCsv(params.get("output"), "\n", "|");
 			// execute program
 			env.execute("WebLogAnalysis Example");
@@ -150,7 +146,7 @@ public class WebLogAnalysis {
 	// *************************************************************************
 	//     USER FUNCTIONS
 	// *************************************************************************
-	
+
 	/**
 	 * FilterFunction that filters for documents that contain a certain set of
 	 * keywords.
@@ -162,7 +158,7 @@ public class WebLogAnalysis {
 		/**
 		 * Filters for documents that contain all of the given keywords and projects the records on the URL field.
 		 *
-		 * Output Format:
+		 * <p>Output Format:
 		 * 0: URL
 		 * 1: DOCUMENT_TEXT
 		 */
@@ -191,7 +187,7 @@ public class WebLogAnalysis {
 		 * Filters for records of the rank relation where the rank is greater
 		 * than the given threshold.
 		 *
-		 * Output Format:
+		 * <p>Output Format:
 		 * 0: RANK
 		 * 1: URL
 		 * 2: AVG_DURATION
@@ -214,7 +210,7 @@ public class WebLogAnalysis {
 		 * Filters for records of the visits relation where the year of visit is equal to a
 		 * specified value. The URL of all visit records passing the filter is emitted.
 		 *
-		 * Output Format:
+		 * <p>Output Format:
 		 * 0: URL
 		 * 1: DATE
 		 */
@@ -222,7 +218,7 @@ public class WebLogAnalysis {
 		public boolean filter(Tuple2<String, String> value) throws Exception {
 			// Parse date string with the format YYYY-MM-DD and extract the year
 			String dateString = value.f1;
-			int year = Integer.parseInt(dateString.substring(0,4));
+			int year = Integer.parseInt(dateString.substring(0, 4));
 			return (year == YEARFILTER);
 		}
 	}
@@ -240,7 +236,7 @@ public class WebLogAnalysis {
 		 * If the visit iterator is empty, all pairs of the rank iterator are emitted.
 		 * Otherwise, no pair is emitted.
 		 *
-		 * Output Format:
+		 * <p>Output Format:
 		 * 0: RANK
 		 * 1: URL
 		 * 2: AVG_DURATION
@@ -260,10 +256,10 @@ public class WebLogAnalysis {
 	// *************************************************************************
 	//     UTIL METHODS
 	// *************************************************************************
-	
+
 	private static DataSet<Tuple2<String, String>> getDocumentsDataSet(ExecutionEnvironment env, ParameterTool params) {
 		// Create DataSet for documents relation (URL, Doc-Text)
-		if(params.has("documents")) {
+		if (params.has("documents")) {
 			return env.readCsvFile(params.get("documents"))
 						.fieldDelimiter("|")
 						.types(String.class, String.class);
@@ -273,10 +269,10 @@ public class WebLogAnalysis {
 			return WebLogData.getDocumentDataSet(env);
 		}
 	}
-	
+
 	private static DataSet<Tuple3<Integer, String, Integer>> getRanksDataSet(ExecutionEnvironment env, ParameterTool params) {
 		// Create DataSet for ranks relation (Rank, URL, Avg-Visit-Duration)
-		if(params.has("ranks")) {
+		if (params.has("ranks")) {
 			return env.readCsvFile(params.get("ranks"))
 						.fieldDelimiter("|")
 						.types(Integer.class, String.class, Integer.class);
@@ -289,7 +285,7 @@ public class WebLogAnalysis {
 
 	private static DataSet<Tuple2<String, String>> getVisitsDataSet(ExecutionEnvironment env, ParameterTool params) {
 		// Create DataSet for visits relation (URL, Date)
-		if(params.has("visits")) {
+		if (params.has("visits")) {
 			return env.readCsvFile(params.get("visits"))
 						.fieldDelimiter("|")
 						.includeFields("011000000")
@@ -300,5 +296,5 @@ public class WebLogAnalysis {
 			return WebLogData.getVisitDataSet(env);
 		}
 	}
-		
+
 }

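The AntiJoinVisits CoGroup in this file is the heart of the query: coGroup sees both groups per key, so an anti-join falls out of forwarding left records only when the right group is empty. The core of that trick, reduced to a sketch with simplified types:

    import org.apache.flink.api.common.functions.CoGroupFunction;
    import org.apache.flink.api.java.tuple.Tuple1;
    import org.apache.flink.api.java.tuple.Tuple3;
    import org.apache.flink.util.Collector;

    public class AntiJoinSketch implements
            CoGroupFunction<Tuple3<Integer, String, Integer>, Tuple1<String>, Tuple3<Integer, String, Integer>> {

        @Override
        public void coGroup(Iterable<Tuple3<Integer, String, Integer>> ranks,
                Iterable<Tuple1<String>> visits,
                Collector<Tuple3<Integer, String, Integer>> out) {
            // no visit matched this URL, so its rank records survive the anti-join
            if (!visits.iterator().hasNext()) {
                for (Tuple3<Integer, String, Integer> rank : ranks) {
                    out.collect(rank);
                }
            }
        }
    }

It is wired up the same way as in the example: joinDocsRanks.coGroup(filterVisits).where(1).equalTo(0).with(new AntiJoinSketch()).
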

[6/7] flink git commit: [FLINK-6706][tests] Remove outdated/unused ChaosMonkeyITCase

Posted by ch...@apache.org.
[FLINK-6706][tests] Remove outdated/unused ChaosMonkeyITCase

This test was disabled in Dec 2015 due to its instability and was never
re-enabled. It is probably outdated and may no longer work given the
changes since then.
Since it doesn't make sense to keep it in its current form, let's remove it.

This closes #3980.


Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/20e4b994
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/20e4b994
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/20e4b994

Branch: refs/heads/master
Commit: 20e4b99436f2dc6398c57e1cda5450b11cc13d17
Parents: 1cd0ee7
Author: Nico Kruber <ni...@data-artisans.com>
Authored: Wed May 24 16:54:11 2017 +0200
Committer: zentol <ch...@apache.org>
Committed: Thu May 25 09:50:03 2017 +0200

----------------------------------------------------------------------
 .../flink/test/recovery/ChaosMonkeyITCase.java  | 732 -------------------
 1 file changed, 732 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/20e4b994/flink-tests/src/test/java/org/apache/flink/test/recovery/ChaosMonkeyITCase.java
----------------------------------------------------------------------
diff --git a/flink-tests/src/test/java/org/apache/flink/test/recovery/ChaosMonkeyITCase.java b/flink-tests/src/test/java/org/apache/flink/test/recovery/ChaosMonkeyITCase.java
deleted file mode 100644
index c8c8d2a..0000000
--- a/flink-tests/src/test/java/org/apache/flink/test/recovery/ChaosMonkeyITCase.java
+++ /dev/null
@@ -1,732 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.test.recovery;
-
-import akka.actor.ActorRef;
-import akka.actor.ActorSystem;
-import org.apache.commons.io.FileUtils;
-import org.apache.flink.api.common.JobID;
-import org.apache.flink.configuration.AkkaOptions;
-import org.apache.flink.configuration.ConfigConstants;
-import org.apache.flink.configuration.Configuration;
-import org.apache.flink.configuration.HighAvailabilityOptions;
-import org.apache.flink.runtime.akka.AkkaUtils;
-import org.apache.flink.runtime.akka.ListeningBehaviour;
-import org.apache.flink.runtime.highavailability.HighAvailabilityServices;
-import org.apache.flink.runtime.highavailability.HighAvailabilityServicesUtils;
-import org.apache.flink.runtime.instance.AkkaActorGateway;
-import org.apache.flink.runtime.jobgraph.JobGraph;
-import org.apache.flink.runtime.jobgraph.JobStatus;
-import org.apache.flink.runtime.leaderelection.TestingListener;
-import org.apache.flink.runtime.leaderretrieval.LeaderRetrievalService;
-import org.apache.flink.runtime.messages.JobManagerMessages;
-import org.apache.flink.runtime.state.filesystem.FsStateBackendFactory;
-import org.apache.flink.runtime.testingUtils.TestingUtils;
-import org.apache.flink.runtime.testutils.CommonTestUtils;
-import org.apache.flink.runtime.testutils.JobManagerActorTestUtils;
-import org.apache.flink.runtime.testutils.JobManagerProcess;
-import org.apache.flink.runtime.testutils.TaskManagerProcess;
-import org.apache.flink.runtime.testutils.TestJvmProcess;
-import org.apache.flink.runtime.testutils.ZooKeeperTestUtils;
-import org.apache.flink.runtime.zookeeper.ZooKeeperTestEnvironment;
-import org.apache.flink.runtime.state.CheckpointListener;
-import org.apache.flink.streaming.api.checkpoint.ListCheckpointed;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
-import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
-import org.apache.flink.util.TestLogger;
-import org.junit.AfterClass;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import scala.concurrent.Await;
-import scala.concurrent.Future;
-import scala.concurrent.duration.Deadline;
-import scala.concurrent.duration.FiniteDuration;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.URI;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.Random;
-import java.util.UUID;
-import java.util.concurrent.TimeUnit;
-
-import static org.apache.flink.util.Preconditions.checkArgument;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-@Ignore
-public class ChaosMonkeyITCase extends TestLogger {
-
-	private static final Logger LOG = LoggerFactory.getLogger(ChaosMonkeyITCase.class);
-
-	private final static ZooKeeperTestEnvironment ZooKeeper = new ZooKeeperTestEnvironment(1);
-
-	private final static File FileStateBackendBasePath;
-
-	private final static File CheckpointCompletedCoordination;
-
-	private final static File ProceedCoordination;
-
-	private final static String COMPLETED_PREFIX = "completed_";
-
-	private final static long LastElement = -1;
-
-	private final Random rand = new Random();
-
-	private int jobManagerPid;
-	private int taskManagerPid;
-
-	static {
-		try {
-			FileStateBackendBasePath = CommonTestUtils.createTempDirectory();
-			CheckpointCompletedCoordination = new File(FileStateBackendBasePath, COMPLETED_PREFIX);
-			ProceedCoordination = new File(FileStateBackendBasePath, "proceed");
-		}
-		catch (IOException e) {
-			throw new RuntimeException("Error in test setup. Could not create directory.", e);
-		}
-	}
-
-	@AfterClass
-	public static void tearDown() throws Exception {
-		if (ZooKeeper != null) {
-			ZooKeeper.shutdown();
-		}
-
-		if (FileStateBackendBasePath != null) {
-			FileUtils.deleteDirectory(FileStateBackendBasePath);
-		}
-	}
-
-	@Test
-	public void testChaosMonkey() throws Exception {
-		// Test config
-		final int numberOfJobManagers = 3;
-		final int numberOfTaskManagers = 3;
-		final int numberOfSlotsPerTaskManager = 2;
-
-		// The final count each source is counting to: 1...n
-		final int n = 5000;
-
-		// Parallelism for the program
-		final int parallelism = numberOfTaskManagers * numberOfSlotsPerTaskManager;
-
-		// The test should not run longer than this
-		final FiniteDuration testDuration = new FiniteDuration(10, TimeUnit.MINUTES);
-
-		// Every x seconds a random job or task manager is killed
-		//
-		// The job will be running for $killEvery seconds and then a random Job/TaskManager
-		// will be killed. On recovery (which takes some time to bring up the new process etc.),
-		// this test will wait for task managers to reconnect before starting the next count down.
-		// Therefore the delay between retries is not important in this setup.
-		final FiniteDuration killEvery = new FiniteDuration(5, TimeUnit.SECONDS);
-
-		// Trigger a checkpoint every checkpointingIntervalMs
-		final int checkpointingIntervalMs = 1000;
-
-		// Total number of kills
-		final int totalNumberOfKills = 10;
-
-		// -----------------------------------------------------------------------------------------
-
-		// Setup
-		Configuration config = ZooKeeperTestUtils.createZooKeeperHAConfig(
-				ZooKeeper.getConnectString(), FileStateBackendBasePath.toURI().toString());
-
-		// Akka and restart timeouts
-		config.setString(AkkaOptions.WATCH_HEARTBEAT_INTERVAL, "1000 ms");
-		config.setString(AkkaOptions.WATCH_HEARTBEAT_PAUSE, "6 s");
-		config.setInteger(AkkaOptions.WATCH_THRESHOLD, 9);
-
-		if (checkpointingIntervalMs >= killEvery.toMillis()) {
-			throw new IllegalArgumentException("Relax! You want to kill processes every " +
-					killEvery + ", but the checkpointing interval is " +
-					checkpointingIntervalMs / 1000 + " seconds. Either decrease the interval or " +
-					"increase the kill interval. Otherwise, the program will not complete any " +
-					"checkpoint.");
-		}
-
-		// Task manager
-		config.setInteger(ConfigConstants.TASK_MANAGER_NUM_TASK_SLOTS, numberOfSlotsPerTaskManager);
-
-		final HighAvailabilityServices highAvailabilityServices = HighAvailabilityServicesUtils.createHighAvailabilityServices(
-			config,
-			TestingUtils.defaultExecutor(),
-			HighAvailabilityServicesUtils.AddressResolution.NO_ADDRESS_RESOLUTION);
-
-		ActorSystem testActorSystem = null;
-		LeaderRetrievalService leaderRetrievalService = null;
-		List<JobManagerProcess> jobManagerProcesses = new ArrayList<>();
-		List<TaskManagerProcess> taskManagerProcesses = new ArrayList<>();
-
-		try {
-			// Initial state
-			for (int i = 0; i < numberOfJobManagers; i++) {
-				jobManagerProcesses.add(createAndStartJobManagerProcess(config));
-			}
-
-			for (int i = 0; i < numberOfTaskManagers; i++) {
-				taskManagerProcesses.add(createAndStartTaskManagerProcess(config));
-			}
-
-			testActorSystem = AkkaUtils.createDefaultActorSystem();
-
-			// Leader listener
-			leaderRetrievalService = highAvailabilityServices.getJobManagerLeaderRetriever(HighAvailabilityServices.DEFAULT_JOB_ID);
-			TestingListener leaderListener = new TestingListener();
-			leaderRetrievalService.start(leaderListener);
-
-			Deadline deadline = testDuration.fromNow();
-
-			// Wait for the new leader
-			int leaderIndex = waitForNewLeader(
-					leaderListener, jobManagerProcesses, deadline.timeLeft());
-
-			// Wait for the task managers to connect
-			waitForTaskManagers(
-					numberOfTaskManagers,
-					jobManagerProcesses.get(leaderIndex),
-					testActorSystem,
-					deadline.timeLeft());
-
-			// The job
-			JobGraph jobGraph = createJobGraph(n, CheckpointCompletedCoordination.getPath(),
-					ProceedCoordination.getPath(), parallelism, checkpointingIntervalMs);
-
-			LOG.info("Submitting job {}", jobGraph.getJobID());
-			submitJobGraph(jobGraph, jobManagerProcesses.get(leaderIndex), leaderListener,
-					testActorSystem, deadline.timeLeft());
-
-			LOG.info("Waiting for a checkpoint to complete before kicking off chaos");
-
-			// Wait for a checkpoint to complete
-			TestJvmProcess.waitForMarkerFiles(FileStateBackendBasePath, COMPLETED_PREFIX,
-					parallelism, deadline.timeLeft().toMillis());
-
-			LOG.info("Checkpoint completed... ready for chaos");
-
-			int currentKillNumber = 1;
-			int currentJobManagerKills = 0;
-			int currentTaskManagerKills = 0;
-
-			for (int i = 0; i < totalNumberOfKills; i++) {
-				LOG.info("Waiting for {} before next kill ({}/{})", killEvery, currentKillNumber++, totalNumberOfKills);
-				Thread.sleep(killEvery.toMillis());
-
-				LOG.info("Checking job status...");
-
-				JobStatus jobStatus = requestJobStatus(jobGraph.getJobID(),
-						jobManagerProcesses.get(leaderIndex), testActorSystem, deadline.timeLeft());
-
-				if (jobStatus != JobStatus.RUNNING && jobStatus != JobStatus.FINISHED) {
-					// Wait for it to run
-					LOG.info("Waiting for job status {}", JobStatus.RUNNING);
-					waitForJobRunning(jobGraph.getJobID(), jobManagerProcesses.get(leaderIndex),
-							testActorSystem, deadline.timeLeft());
-				}
-				else if (jobStatus == JobStatus.FINISHED) {
-					// Early finish
-					LOG.info("Job finished");
-					return;
-				}
-				else {
-					LOG.info("Job status is {}", jobStatus);
-				}
-
-				if (rand.nextBoolean()) {
-					LOG.info("Killing the leading JobManager");
-
-					JobManagerProcess newJobManager = createAndStartJobManagerProcess(config);
-
-					JobManagerProcess leader = jobManagerProcesses.remove(leaderIndex);
-					leader.destroy();
-					currentJobManagerKills++;
-
-					LOG.info("Killed {}", leader);
-
-					// Make sure to add the new job manager before looking for a new leader
-					jobManagerProcesses.add(newJobManager);
-
-					// Wait for the new leader
-					leaderIndex = waitForNewLeader(
-							leaderListener, jobManagerProcesses, deadline.timeLeft());
-
-					// Wait for the task managers to connect
-					waitForTaskManagers(
-							numberOfTaskManagers,
-							jobManagerProcesses.get(leaderIndex),
-							testActorSystem,
-							deadline.timeLeft());
-				}
-				else {
-					LOG.info("Killing a random TaskManager");
-					TaskManagerProcess newTaskManager = createAndStartTaskManagerProcess(config);
-
-					// Wait for this new task manager to be connected
-					waitForTaskManagers(
-							numberOfTaskManagers + 1,
-							jobManagerProcesses.get(leaderIndex),
-							testActorSystem,
-							deadline.timeLeft());
-
-					// Now it's safe to kill a process
-					int next = rand.nextInt(numberOfTaskManagers);
-					TaskManagerProcess taskManager = taskManagerProcesses.remove(next);
-
-					LOG.info("{} has been chosen. Killing process...", taskManager);
-
-					taskManager.destroy();
-					currentTaskManagerKills++;
-
-					// Add the new task manager after killing an old one
-					taskManagerProcesses.add(newTaskManager);
-				}
-			}
-
-			LOG.info("Chaos is over. Total kills: {} ({} job manager + {} task managers). " +
-							"Checking job status...",
-					totalNumberOfKills, currentJobManagerKills, currentTaskManagerKills);
-
-			// Signal the job to speed up (if it is not done yet)
-			TestJvmProcess.touchFile(ProceedCoordination);
-
-			// Wait for the job to finish
-			LOG.info("Waiting for job status {}", JobStatus.FINISHED);
-			waitForJobFinished(jobGraph.getJobID(), jobManagerProcesses.get(leaderIndex),
-					testActorSystem, deadline.timeLeft());
-
-			LOG.info("Job finished");
-
-			LOG.info("Waiting for job removal");
-			waitForJobRemoved(jobGraph.getJobID(), jobManagerProcesses.get(leaderIndex),
-					testActorSystem, deadline.timeLeft());
-			LOG.info("Job removed");
-
-			LOG.info("Checking clean recovery state...");
-			checkCleanRecoveryState(config);
-			LOG.info("Recovery state clean");
-		}
-		catch (Throwable t) {
-			// Print early (in some situations the process logs get too big
-			// for Travis and the root problem is not shown)
-			t.printStackTrace();
-
-			System.out.println("#################################################");
-			System.out.println(" TASK MANAGERS");
-			System.out.println("#################################################");
-
-			for (TaskManagerProcess taskManagerProcess : taskManagerProcesses) {
-				taskManagerProcess.printProcessLog();
-			}
-
-			System.out.println("#################################################");
-			System.out.println(" JOB MANAGERS");
-			System.out.println("#################################################");
-
-			for (JobManagerProcess jobManagerProcess : jobManagerProcesses) {
-				jobManagerProcess.printProcessLog();
-			}
-
-			throw t;
-		}
-		finally {
-			for (JobManagerProcess jobManagerProcess : jobManagerProcesses) {
-				if (jobManagerProcess != null) {
-					jobManagerProcess.destroy();
-				}
-			}
-
-			if (leaderRetrievalService != null) {
-				leaderRetrievalService.stop();
-			}
-
-			if (testActorSystem != null) {
-				testActorSystem.shutdown();
-			}
-
-			highAvailabilityServices.closeAndCleanupAllData();
-		}
-	}
-
-	// - The test program --------------------------------------------------------------------------
-
-	private JobGraph createJobGraph(
-			int n,
-			String completedCheckpointMarker,
-			String proceedMarker,
-			int parallelism,
-			int checkpointingIntervalMs) {
-
-		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
-		env.setParallelism(parallelism);
-		env.enableCheckpointing(checkpointingIntervalMs);
-
-		int expectedResult = parallelism * n * (n + 1) / 2;
-
-		env.addSource(new CheckpointedSequenceSource(n, completedCheckpointMarker, proceedMarker))
-				.addSink(new CountingSink(parallelism, expectedResult))
-				.setParallelism(1);
-
-		return env.getStreamGraph().getJobGraph();
-	}
-
-	public static class CheckpointedSequenceSource extends RichParallelSourceFunction<Long>
-			implements ListCheckpointed<Long>, CheckpointListener {
-
-		private static final long serialVersionUID = 0L;
-
-		private final long end;
-
-		private final String completedCheckpointMarkerFilePath;
-
-		private final File proceedFile;
-
-		private long current = 0;
-
-		private volatile boolean isRunning = true;
-
-		public CheckpointedSequenceSource(long end, String completedCheckpointMarkerFilePath, String proceedMarkerFilePath) {
-			checkArgument(end >= 0, "Negative final count");
-
-			this.end = end;
-			this.completedCheckpointMarkerFilePath = completedCheckpointMarkerFilePath;
-			this.proceedFile = new File(proceedMarkerFilePath);
-		}
-
-		@Override
-		public void run(SourceContext<Long> ctx) throws Exception {
-			while (isRunning) {
-
-				if (!proceedFile.exists()) {
-					Thread.sleep(50);
-				}
-
-				synchronized (ctx.getCheckpointLock()) {
-					if (current <= end) {
-						ctx.collect(current++);
-					}
-					else {
-						ctx.collect(LastElement);
-						return;
-					}
-				}
-			}
-		}
-
-		@Override
-		public List<Long> snapshotState(long checkpointId, long timestamp) throws Exception {
-			LOG.info("Snapshotting state {} @ ID {}.", current, checkpointId);
-			return Collections.singletonList(this.current);
-		}
-
-		@Override
-		public void restoreState(List<Long> state) throws Exception {
-			if (state.isEmpty() || state.size() > 1) {
-				throw new RuntimeException("Test failed due to unexpected recovered state size " + state.size());
-			}
-			LOG.info("Restoring state {}/{}", state.get(0), end);
-			this.current = state.get(0);
-		}
-
-		@Override
-		public void cancel() {
-			isRunning = false;
-		}
-
-		@Override
-		public void notifyCheckpointComplete(long checkpointId) throws Exception {
-			LOG.info("Checkpoint {} completed.", checkpointId);
-
-			int taskIndex = getRuntimeContext().getIndexOfThisSubtask();
-			TestJvmProcess.touchFile(new File(completedCheckpointMarkerFilePath + taskIndex));
-		}
-	}
-
-	public static class CountingSink extends RichSinkFunction<Long>
-			implements ListCheckpointed<CountingSink>, CheckpointListener {
-
-		private static final Logger LOG = LoggerFactory.getLogger(CountingSink.class);
-
-		private static final long serialVersionUID = 0L;
-
-		private final int parallelism;
-
-		private final long expectedFinalCount;
-
-		private long current;
-
-		private int numberOfReceivedLastElements;
-
-		public CountingSink(int parallelism, long expectedFinalCount) {
-			this.expectedFinalCount = expectedFinalCount;
-			this.parallelism = parallelism;
-		}
-
-		@Override
-		public void invoke(Long value) throws Exception {
-			if (value == LastElement) {
-				numberOfReceivedLastElements++;
-
-				if (numberOfReceivedLastElements == parallelism) {
-					if (current != expectedFinalCount) {
-						throw new Exception("Unexpected final result " + current);
-					}
-					else {
-						LOG.info("Final result " + current);
-					}
-				}
-				else if (numberOfReceivedLastElements > parallelism) {
-					throw new IllegalStateException("Received more elements than parallelism.");
-				}
-			}
-			else {
-				current += value;
-			}
-		}
-
-		@Override
-		public List<CountingSink> snapshotState(long checkpointId, long timestamp) throws Exception {
-			LOG.info("Snapshotting state {}:{} @ ID {}.", current, numberOfReceivedLastElements, checkpointId);
-			return Collections.singletonList(this);
-		}
-
-		@Override
-		public void restoreState(List<CountingSink> state) throws Exception {
-			if (state.isEmpty() || state.size() > 1) {
-				throw new RuntimeException("Test failed due to unexpected recovered state size " + state.size());
-			}
-			CountingSink sink = state.get(0);
-			this.current = sink.current;
-			this.numberOfReceivedLastElements = sink.numberOfReceivedLastElements;
-			LOG.info("Restoring state {}:{}", sink.current, sink.numberOfReceivedLastElements);
-		}
-
-		@Override
-		public void notifyCheckpointComplete(long checkpointId) throws Exception {
-			LOG.info("Checkpoint {} completed.", checkpointId);
-		}
-	}
-
-	// - Utilities ---------------------------------------------------------------------------------
-
-	private void submitJobGraph(
-			JobGraph jobGraph,
-			JobManagerProcess jobManager,
-			TestingListener leaderListener,
-			ActorSystem actorSystem,
-			FiniteDuration timeout) throws Exception {
-
-		ActorRef jobManagerRef = jobManager.getActorRef(actorSystem, timeout);
-		UUID jobManagerLeaderId = leaderListener.getLeaderSessionID();
-		AkkaActorGateway jobManagerGateway = new AkkaActorGateway(jobManagerRef, jobManagerLeaderId);
-
-		jobManagerGateway.tell(new JobManagerMessages.SubmitJob(jobGraph, ListeningBehaviour.DETACHED));
-	}
-
-	private void checkCleanRecoveryState(Configuration config) throws Exception {
-		LOG.info("Checking " + ZooKeeper.getClientNamespace() +
-				ConfigConstants.DEFAULT_ZOOKEEPER_JOBGRAPHS_PATH);
-		List<String> jobGraphs = ZooKeeper.getChildren(ConfigConstants.DEFAULT_ZOOKEEPER_JOBGRAPHS_PATH);
-		assertEquals("Unclean job graphs: " + jobGraphs, 0, jobGraphs.size());
-
-		LOG.info("Checking " + ZooKeeper.getClientNamespace() +
-				ConfigConstants.DEFAULT_ZOOKEEPER_CHECKPOINTS_PATH);
-
-		for (int i = 0; i < 10; i++) {
-			List<String> checkpoints = ZooKeeper.getChildren(ConfigConstants.DEFAULT_ZOOKEEPER_CHECKPOINTS_PATH);
-			assertEquals("Unclean checkpoints: " + checkpoints, 0, checkpoints.size());
-
-			LOG.info("Unclean... retrying in 2s.");
-			Thread.sleep(2000);
-		}
-
-		LOG.info("Checking " + ZooKeeper.getClientNamespace() +
-				ConfigConstants.DEFAULT_ZOOKEEPER_CHECKPOINT_COUNTER_PATH);
-		List<String> checkpointCounter = ZooKeeper.getChildren(ConfigConstants.DEFAULT_ZOOKEEPER_CHECKPOINT_COUNTER_PATH);
-		assertEquals("Unclean checkpoint counter: " + checkpointCounter, 0, checkpointCounter.size());
-
-		LOG.info("ZooKeeper state is clean");
-
-		LOG.info("Checking file system backend state...");
-
-		File fsCheckpoints = new File(new URI(config.getString(FsStateBackendFactory.CHECKPOINT_DIRECTORY_URI_CONF_KEY, "")).getPath());
-
-		LOG.info("Checking " + fsCheckpoints);
-
-		File[] files = fsCheckpoints.listFiles();
-		if (files == null) {
-			fail(fsCheckpoints + " does not exist: " + Arrays.toString(FileStateBackendBasePath.listFiles()));
-		}
-
-		File fsRecovery = new File(new URI(config.getString(HighAvailabilityOptions.HA_STORAGE_PATH)).getPath());
-
-		LOG.info("Checking " + fsRecovery);
-
-		files = fsRecovery.listFiles();
-		if (files == null) {
-			fail(fsRecovery + " does not exist: " + Arrays.toString(FileStateBackendBasePath.listFiles()));
-		}
-	}
-
-	private void waitForJobRemoved(
-			JobID jobId, JobManagerProcess jobManager, ActorSystem actorSystem, FiniteDuration timeout)
-			throws Exception {
-
-		ActorRef jobManagerRef = jobManager.getActorRef(actorSystem, timeout);
-		AkkaActorGateway jobManagerGateway = new AkkaActorGateway(jobManagerRef, null);
-
-		Future<Object> archiveFuture = jobManagerGateway.ask(JobManagerMessages.getRequestArchive(), timeout);
-
-		ActorRef archive = ((JobManagerMessages.ResponseArchive) Await.result(archiveFuture, timeout)).actor();
-
-		AkkaActorGateway archiveGateway = new AkkaActorGateway(archive, null);
-
-		Deadline deadline = timeout.fromNow();
-
-		while (deadline.hasTimeLeft()) {
-			JobManagerMessages.JobStatusResponse resp = JobManagerActorTestUtils
-					.requestJobStatus(jobId, archiveGateway, deadline.timeLeft());
-
-			if (resp instanceof JobManagerMessages.JobNotFound) {
-				Thread.sleep(100);
-			}
-			else {
-				return;
-			}
-		}
-	}
-
-	private JobStatus requestJobStatus(
-			JobID jobId, JobManagerProcess jobManager, ActorSystem actorSystem, FiniteDuration timeout)
-			throws Exception {
-
-		ActorRef jobManagerRef = jobManager.getActorRef(actorSystem, timeout);
-		AkkaActorGateway jobManagerGateway = new AkkaActorGateway(jobManagerRef, null);
-
-		JobManagerMessages.JobStatusResponse resp = JobManagerActorTestUtils
-				.requestJobStatus(jobId, jobManagerGateway, timeout);
-
-		if (resp instanceof JobManagerMessages.CurrentJobStatus) {
-			JobManagerMessages.CurrentJobStatus jobStatusResponse = (JobManagerMessages
-					.CurrentJobStatus) resp;
-
-			return jobStatusResponse.status();
-		}
-		else if (resp instanceof JobManagerMessages.JobNotFound) {
-			return JobStatus.RESTARTING;
-		}
-
-		throw new IllegalStateException("Unexpected response from JobManager");
-	}
-
-	private void waitForJobRunning(
-			JobID jobId, JobManagerProcess jobManager, ActorSystem actorSystem, FiniteDuration timeout)
-			throws Exception {
-
-		ActorRef jobManagerRef = jobManager.getActorRef(actorSystem, timeout);
-		AkkaActorGateway jobManagerGateway = new AkkaActorGateway(jobManagerRef, null);
-
-		JobManagerActorTestUtils.waitForJobStatus(jobId, JobStatus.RUNNING, jobManagerGateway, timeout);
-	}
-
-	private void waitForJobFinished(
-			JobID jobId, JobManagerProcess jobManager, ActorSystem actorSystem, FiniteDuration timeout)
-			throws Exception {
-
-		ActorRef jobManagerRef = jobManager.getActorRef(actorSystem, timeout);
-		AkkaActorGateway jobManagerGateway = new AkkaActorGateway(jobManagerRef, null);
-
-		JobManagerActorTestUtils.waitForJobStatus(jobId, JobStatus.FINISHED, jobManagerGateway, timeout);
-	}
-
-	private void waitForTaskManagers(
-			int minimumNumberOfTaskManagers,
-			JobManagerProcess jobManager,
-			ActorSystem actorSystem,
-			FiniteDuration timeout) throws Exception {
-
-		LOG.info("Waiting for {} task managers to connect to leading {}",
-				minimumNumberOfTaskManagers, jobManager);
-
-		ActorRef jobManagerRef = jobManager.getActorRef(actorSystem, timeout);
-		AkkaActorGateway jobManagerGateway = new AkkaActorGateway(jobManagerRef, null);
-
-		JobManagerActorTestUtils.waitForTaskManagers(
-				minimumNumberOfTaskManagers, jobManagerGateway, timeout);
-
-		LOG.info("All task managers connected");
-	}
-
-	private int waitForNewLeader(
-			TestingListener leaderListener,
-			List<JobManagerProcess> jobManagerProcesses,
-			FiniteDuration timeout) throws Exception {
-
-		LOG.info("Waiting for new leader notification");
-		leaderListener.waitForNewLeader(timeout.toMillis());
-
-		LOG.info("Leader: {}:{}", leaderListener.getAddress(), leaderListener.getLeaderSessionID());
-
-		String currentLeader = leaderListener.getAddress();
-
-		int leaderIndex = -1;
-
-		for (int i = 0; i < jobManagerProcesses.size(); i++) {
-			JobManagerProcess jobManager = jobManagerProcesses.get(i);
-			if (jobManager.getJobManagerAkkaURL(timeout).equals(currentLeader)) {
-				leaderIndex = i;
-				break;
-			}
-		}
-
-		if (leaderIndex == -1) {
-			throw new IllegalStateException("Failed to determine which process is leader");
-		}
-
-		return leaderIndex;
-	}
-
-	private JobManagerProcess createAndStartJobManagerProcess(Configuration config)
-			throws Exception {
-
-		JobManagerProcess jobManager = new JobManagerProcess(jobManagerPid++, config);
-		jobManager.startProcess();
-		LOG.info("Created and started {}.", jobManager);
-
-		return jobManager;
-	}
-
-	private TaskManagerProcess createAndStartTaskManagerProcess(Configuration config)
-			throws Exception {
-
-		TaskManagerProcess taskManager = new TaskManagerProcess(taskManagerPid++, config);
-		taskManager.startProcess();
-		LOG.info("Created and started {}.", taskManager);
-
-		return taskManager;
-	}
-
-}
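
For context, the deleted test coordinates the job and the test driver purely
through marker files: each source subtask touches a completed-checkpoint marker
in notifyCheckpointComplete(), and the sources only advance past their sleep
loop once the proceed marker exists. A minimal sketch of the driver side of
that handshake, assuming the same marker naming as above (the helper below is
hypothetical, not part of the test):

	// Hypothetical driver-side helper mirroring the marker-file protocol above.
	static void awaitCheckpointThenProceed(
			String completedCheckpointMarker, File proceedMarker, int parallelism) throws Exception {

		// Wait until every source subtask has confirmed a completed checkpoint.
		for (int i = 0; i < parallelism; i++) {
			File marker = new File(completedCheckpointMarker + i);
			while (!marker.exists()) {
				Thread.sleep(50);
			}
		}

		// Signal all sources to proceed past their wait loop.
		TestJvmProcess.touchFile(proceedMarker);
	}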


[7/7] flink git commit: [FLINK-6699] Activate strict checkstyle for flink-yarn-tests

Posted by ch...@apache.org.
[FLINK-6699] Activate strict checkstyle for flink-yarn-tests

This closes #3985.


Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/0e69dd5c
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/0e69dd5c
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/0e69dd5c

Branch: refs/heads/master
Commit: 0e69dd5cc603443355a722fbbac5d96264eae1d2
Parents: 20e4b99
Author: zentol <ch...@apache.org>
Authored: Wed May 24 12:42:28 2017 +0200
Committer: zentol <ch...@apache.org>
Committed: Thu May 25 10:59:23 2017 +0200

----------------------------------------------------------------------
 ...CliFrontendYarnAddressConfigurationTest.java |  23 +--
 .../flink/yarn/FlinkYarnSessionCliTest.java     |  26 +--
 .../yarn/TestingYarnClusterDescriptor.java      |   4 +-
 .../java/org/apache/flink/yarn/UtilsTest.java   |  31 ++-
 .../flink/yarn/YARNHighAvailabilityITCase.java  |  29 +--
 .../YARNSessionCapacitySchedulerITCase.java     | 111 +++++-----
 .../flink/yarn/YARNSessionFIFOITCase.java       |  57 +++--
 .../yarn/YARNSessionFIFOSecuredITCase.java      |  20 +-
 .../flink/yarn/YarnClusterDescriptorTest.java   |   9 +-
 .../org/apache/flink/yarn/YarnTestBase.java     | 207 ++++++++++---------
 .../flink/yarn/TestingYarnTaskManager.scala     |   2 -
 11 files changed, 265 insertions(+), 254 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/0e69dd5c/flink-yarn-tests/src/test/java/org/apache/flink/yarn/CliFrontendYarnAddressConfigurationTest.java
----------------------------------------------------------------------
diff --git a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/CliFrontendYarnAddressConfigurationTest.java b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/CliFrontendYarnAddressConfigurationTest.java
index 6a8c266..aa93b00 100644
--- a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/CliFrontendYarnAddressConfigurationTest.java
+++ b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/CliFrontendYarnAddressConfigurationTest.java
@@ -18,8 +18,6 @@
 
 package org.apache.flink.yarn;
 
-import org.apache.commons.cli.CommandLine;
-
 import org.apache.flink.client.CliFrontend;
 import org.apache.flink.client.cli.CliFrontendParser;
 import org.apache.flink.client.cli.CommandLineOptions;
@@ -33,6 +31,8 @@ import org.apache.flink.configuration.HighAvailabilityOptions;
 import org.apache.flink.configuration.IllegalConfigurationException;
 import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.yarn.cli.FlinkYarnSessionCli;
+
+import org.apache.commons.cli.CommandLine;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
@@ -40,14 +40,12 @@ import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.client.api.YarnClient;
 import org.apache.hadoop.yarn.client.api.impl.YarnClientImpl;
 import org.apache.hadoop.yarn.exceptions.YarnException;
-
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
-
 import org.mockito.Mockito;
 
 import java.io.File;
@@ -73,8 +71,8 @@ public class CliFrontendYarnAddressConfigurationTest {
 	@Rule
 	public TemporaryFolder temporaryFolder = new TemporaryFolder();
 
-	private final static PrintStream OUT = System.out;
-	private final static PrintStream ERR = System.err;
+	private static final PrintStream OUT = System.out;
+	private static final PrintStream ERR = System.err;
 
 	@BeforeClass
 	public static void disableStdOutErr() {
@@ -106,7 +104,6 @@ public class CliFrontendYarnAddressConfigurationTest {
 
 	private static final String validPropertiesFile = "applicationID=" + TEST_YARN_APPLICATION_ID;
 
-
 	private static final String TEST_JOB_MANAGER_ADDRESS = "192.168.1.33";
 	private static final int TEST_JOB_MANAGER_PORT = 55443;
 
@@ -114,11 +111,9 @@ public class CliFrontendYarnAddressConfigurationTest {
 		"jobmanager.rpc.address: " + TEST_JOB_MANAGER_ADDRESS + "\n" +
 		"jobmanager.rpc.port: " + TEST_JOB_MANAGER_PORT;
 
-
 	private static final String invalidPropertiesFile =
 		"jasfobManager=" + TEST_YARN_JOB_MANAGER_ADDRESS + ":asf" + TEST_YARN_JOB_MANAGER_PORT;
 
-
 	/**
 	 * Test that the CliFrontend is able to pick up the .yarn-properties file from a specified location.
 	 */
@@ -175,7 +170,6 @@ public class CliFrontendYarnAddressConfigurationTest {
 			TEST_JOB_MANAGER_PORT);
 	}
 
-
 	@Test
 	public void testResumeFromYarnID() throws Exception {
 		File directoryPath = writeYarnPropertiesFile(validPropertiesFile);
@@ -257,7 +251,6 @@ public class CliFrontendYarnAddressConfigurationTest {
 			TEST_YARN_JOB_MANAGER_PORT);
 	}
 
-
 	@Test
 	public void testYarnIDOverridesPropertiesFile() throws Exception {
 		File directoryPath = writeYarnPropertiesFile(invalidPropertiesFile);
@@ -276,7 +269,6 @@ public class CliFrontendYarnAddressConfigurationTest {
 			TEST_YARN_JOB_MANAGER_PORT);
 	}
 
-
 	@Test
 	public void testManualOptionsOverridesYarn() throws Exception {
 
@@ -307,7 +299,7 @@ public class CliFrontendYarnAddressConfigurationTest {
 		String currentUser = System.getProperty("user.name");
 
 		// copy .yarn-properties-<username>
-		File testPropertiesFile = new File(tmpFolder, ".yarn-properties-"+currentUser);
+		File testPropertiesFile = new File(tmpFolder, ".yarn-properties-" + currentUser);
 		Files.write(testPropertiesFile.toPath(), contents.getBytes(), StandardOpenOption.CREATE);
 
 		// copy reference flink-conf.yaml to temporary test directory and append custom configuration path.
@@ -336,9 +328,8 @@ public class CliFrontendYarnAddressConfigurationTest {
 		}
 	}
 
-
 	/**
-	 * Injects an extended FlinkYarnSessionCli that deals with mocking Yarn communication
+	 * Injects an extended FlinkYarnSessionCli that deals with mocking Yarn communication.
 	 */
 	private static class CustomYarnTestCLI extends TestCLI {
 
@@ -396,7 +387,6 @@ public class CliFrontendYarnAddressConfigurationTest {
 					return Mockito.mock(YarnClusterClient.class);
 				}
 
-
 				private class TestYarnClient extends YarnClientImpl {
 
 					private final List<ApplicationReport> reports = new LinkedList<>();
@@ -439,7 +429,6 @@ public class CliFrontendYarnAddressConfigurationTest {
 		}
 	}
 
-
 	private static void checkJobManagerAddress(Configuration config, String expectedAddress, int expectedPort) {
 		String jobManagerAddress = config.getString(ConfigConstants.JOB_MANAGER_IPC_ADDRESS_KEY, null);
 		int jobManagerPort = config.getInteger(ConfigConstants.JOB_MANAGER_IPC_PORT_KEY, -1);
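
The import reshuffling in this file (and in every file below) follows the
strict checkstyle's import grouping. Roughly, as read off these diffs rather
than from the checkstyle configuration itself:

	// org.apache.flink.* imports come first ...
	import org.apache.flink.configuration.Configuration;

	// ... then third-party imports (commons, hadoop, junit, mockito) ...
	import org.apache.commons.cli.CommandLine;
	import org.junit.Test;

	// ... then java.* and javax.*, with scala.* last (see the YarnTestBase hunks below).
	import java.io.File;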

http://git-wip-us.apache.org/repos/asf/flink/blob/0e69dd5c/flink-yarn-tests/src/test/java/org/apache/flink/yarn/FlinkYarnSessionCliTest.java
----------------------------------------------------------------------
diff --git a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/FlinkYarnSessionCliTest.java b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/FlinkYarnSessionCliTest.java
index 4da5a39..5e8cffd 100644
--- a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/FlinkYarnSessionCliTest.java
+++ b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/FlinkYarnSessionCliTest.java
@@ -18,19 +18,19 @@
 
 package org.apache.flink.yarn;
 
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.DefaultParser;
-import org.apache.commons.cli.Options;
-import org.apache.flink.configuration.ConfigConstants;
 import org.apache.flink.client.CliFrontend;
 import org.apache.flink.client.cli.CliFrontendParser;
 import org.apache.flink.client.cli.RunOptions;
 import org.apache.flink.client.program.ClusterClient;
+import org.apache.flink.configuration.ConfigConstants;
 import org.apache.flink.configuration.Configuration;
-import org.apache.flink.yarn.cli.FlinkYarnSessionCli;
 import org.apache.flink.test.util.TestBaseUtils;
+import org.apache.flink.yarn.cli.FlinkYarnSessionCli;
 
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.Options;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.client.api.YarnClient;
@@ -41,18 +41,20 @@ import org.junit.rules.TemporaryFolder;
 import org.mockito.Mockito;
 
 import java.io.File;
-import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.util.HashMap;
 import java.util.Map;
 
+/**
+ * Tests for the FlinkYarnSessionCli.
+ */
 public class FlinkYarnSessionCliTest {
 
 	@Rule
 	public TemporaryFolder tmp = new TemporaryFolder();
 
 	@Test
-	public void testDynamicProperties() throws IOException {
+	public void testDynamicProperties() throws Exception {
 
 		Map<String, String> map = new HashMap<String, String>(System.getenv());
 		File tmpFolder = tmp.newFolder();
@@ -66,14 +68,8 @@ public class FlinkYarnSessionCliTest {
 		cli.addRunOptions(options);
 
 		CommandLineParser parser = new DefaultParser();
-		CommandLine cmd = null;
-		try {
-			cmd = parser.parse(options, new String[]{"run", "-j", "fake.jar", "-n", "15",
+		CommandLine cmd = parser.parse(options, new String[]{"run", "-j", "fake.jar", "-n", "15",
 				"-D", "akka.ask.timeout=5 min", "-D", "env.java.opts=-DappName=foobar"});
-		} catch(Exception e) {
-			e.printStackTrace();
-			Assert.fail("Parsing failed with " + e.getMessage());
-		}
 
 		AbstractYarnClusterDescriptor flinkYarnDescriptor = cli.createDescriptor(null, cmd);
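
The simplified test above feeds "-D akka.ask.timeout=5 min" style dynamic
properties through commons-cli. A rough sketch of turning those option values
into key/value pairs (illustrative only, not FlinkYarnSessionCli's actual code):

	// Split each "-D key=value" argument at the first '='.
	Map<String, String> dynamicProperties = new HashMap<>();
	String[] values = cmd.getOptionValues("D");
	if (values != null) {
		for (String dynamicProperty : values) {
			int eq = dynamicProperty.indexOf('=');
			if (eq > 0) {
				dynamicProperties.put(dynamicProperty.substring(0, eq), dynamicProperty.substring(eq + 1));
			}
		}
	}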
 

http://git-wip-us.apache.org/repos/asf/flink/blob/0e69dd5c/flink-yarn-tests/src/test/java/org/apache/flink/yarn/TestingYarnClusterDescriptor.java
----------------------------------------------------------------------
diff --git a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/TestingYarnClusterDescriptor.java b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/TestingYarnClusterDescriptor.java
index a3337bb..377edaa 100644
--- a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/TestingYarnClusterDescriptor.java
+++ b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/TestingYarnClusterDescriptor.java
@@ -27,7 +27,7 @@ import java.util.List;
 
 /**
  * Yarn client which starts a {@link TestingApplicationMaster}. Additionally the client adds the
- * flink-yarn-tests-XXX-tests.jar and the flink-runtime-XXX-tests.jar to the set of files which
+ * flink-yarn-tests-X-tests.jar and the flink-runtime-X-tests.jar to the set of files which
  * are shipped to the yarn cluster. This is necessary to load the testing classes.
  */
 public class TestingYarnClusterDescriptor extends AbstractYarnClusterDescriptor {
@@ -59,7 +59,7 @@ public class TestingYarnClusterDescriptor extends AbstractYarnClusterDescriptor
 		return TestingApplicationMaster.class;
 	}
 
-	public static class TestJarFinder implements FilenameFilter {
+	private static class TestJarFinder implements FilenameFilter {
 
 		private final String jarName;
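
TestJarFinder is a plain java.io.FilenameFilter; its accept() body falls
outside this hunk, but a matcher of this kind typically looks like the
following sketch (implementation assumed, only the field above is from the diff):

	@Override
	public boolean accept(File dir, String name) {
		// Match e.g. flink-yarn-tests-<version>-tests.jar for the configured module name.
		return name.startsWith(jarName) && name.endsWith("-tests.jar");
	}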
 

http://git-wip-us.apache.org/repos/asf/flink/blob/0e69dd5c/flink-yarn-tests/src/test/java/org/apache/flink/yarn/UtilsTest.java
----------------------------------------------------------------------
diff --git a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/UtilsTest.java b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/UtilsTest.java
index c710064..82a656c 100644
--- a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/UtilsTest.java
+++ b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/UtilsTest.java
@@ -15,10 +15,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.flink.yarn;
 
 import org.apache.flink.configuration.ConfigConstants;
 import org.apache.flink.configuration.Configuration;
+
 import org.apache.log4j.AppenderSkeleton;
 import org.apache.log4j.Level;
 import org.apache.log4j.spi.LoggingEvent;
@@ -33,6 +35,9 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 
+/**
+ * Tests for various utilities.
+ */
 public class UtilsTest {
 	private static final Logger LOG = LoggerFactory.getLogger(UtilsTest.class);
 
@@ -60,8 +65,8 @@ public class UtilsTest {
 		conf.setDouble(ConfigConstants.CONTAINERIZED_HEAP_CUTOFF_RATIO, 0.15);
 		conf.setInteger(ConfigConstants.CONTAINERIZED_HEAP_CUTOFF_MIN, 384);
 
-		Assert.assertEquals(616, Utils.calculateHeapSize(1000, conf) );
-		Assert.assertEquals(8500, Utils.calculateHeapSize(10000, conf) );
+		Assert.assertEquals(616, Utils.calculateHeapSize(1000, conf));
+		Assert.assertEquals(8500, Utils.calculateHeapSize(10000, conf));
 
 		// test different configuration
 		Assert.assertEquals(3400, Utils.calculateHeapSize(4000, conf));
@@ -81,8 +86,8 @@ public class UtilsTest {
 		conf.setDouble(ConfigConstants.YARN_HEAP_CUTOFF_RATIO, 0.15);
 		conf.setInteger(ConfigConstants.YARN_HEAP_CUTOFF_MIN, 384);
 
-		Assert.assertEquals(616, Utils.calculateHeapSize(1000, conf) );
-		Assert.assertEquals(8500, Utils.calculateHeapSize(10000, conf) );
+		Assert.assertEquals(616, Utils.calculateHeapSize(1000, conf));
+		Assert.assertEquals(8500, Utils.calculateHeapSize(10000, conf));
 	}
 
 	@Test(expected = IllegalArgumentException.class)
@@ -145,15 +150,15 @@ public class UtilsTest {
 
 	public static void checkForLogString(String expected) {
 		LoggingEvent found = getEventContainingString(expected);
-		if(found != null) {
-			LOG.info("Found expected string '"+expected+"' in log message "+found);
+		if (found != null) {
+			LOG.info("Found expected string '" + expected + "' in log message " + found);
 			return;
 		}
 		Assert.fail("Unable to find expected string '" + expected + "' in log messages");
 	}
 
 	public static LoggingEvent getEventContainingString(String expected) {
-		if(testAppender == null) {
+		if (testAppender == null) {
 			throw new NullPointerException("Initialize test appender first");
 		}
 		LoggingEvent found = null;
@@ -169,10 +174,16 @@ public class UtilsTest {
 		return found;
 	}
 
-	public static class TestAppender extends AppenderSkeleton {
+	private static class TestAppender extends AppenderSkeleton {
 		public final List<LoggingEvent> events = new ArrayList<>();
-		public void close() {}
-		public boolean requiresLayout() {return false;}
+
+		public void close() {
+		}
+
+		public boolean requiresLayout() {
+			return false;
+		}
+
 		@Override
 		protected void append(LoggingEvent event) {
 			synchronized (events){
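
For reference, the calculateHeapSize assertions above all encode one cutoff
rule: heap = container - max(cutoffRatio * container, cutoffMin). A minimal
sketch with the test's numbers (the helper name is illustrative; the real
logic lives in org.apache.flink.yarn.Utils):

	static long heapSizeAfterCutoff(long containerMemoryMB, double cutoffRatio, long cutoffMin) {
		long cutoff = Math.max((long) (containerMemoryMB * cutoffRatio), cutoffMin);
		return containerMemoryMB - cutoff;
	}

	// With ratio 0.15 and minimum 384:
	//   1000 MB  -> cutoff max(150, 384)  = 384  -> heap 616
	//   4000 MB  -> cutoff max(600, 384)  = 600  -> heap 3400
	//   10000 MB -> cutoff max(1500, 384) = 1500 -> heap 8500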

http://git-wip-us.apache.org/repos/asf/flink/blob/0e69dd5c/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNHighAvailabilityITCase.java
----------------------------------------------------------------------
diff --git a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNHighAvailabilityITCase.java b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNHighAvailabilityITCase.java
index 1f043ef..327b376 100644
--- a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNHighAvailabilityITCase.java
+++ b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNHighAvailabilityITCase.java
@@ -18,10 +18,6 @@
 
 package org.apache.flink.yarn;
 
-import akka.actor.ActorSystem;
-import akka.actor.PoisonPill;
-import akka.testkit.JavaTestKit;
-import org.apache.curator.test.TestingServer;
 import org.apache.flink.client.program.ClusterClient;
 import org.apache.flink.configuration.ConfigConstants;
 import org.apache.flink.configuration.CoreOptions;
@@ -37,6 +33,11 @@ import org.apache.flink.runtime.messages.Acknowledge;
 import org.apache.flink.runtime.state.filesystem.FsStateBackendFactory;
 import org.apache.flink.runtime.testingUtils.TestingJobManagerMessages;
 import org.apache.flink.runtime.util.LeaderRetrievalUtils;
+
+import akka.actor.ActorSystem;
+import akka.actor.PoisonPill;
+import akka.testkit.JavaTestKit;
+import org.apache.curator.test.TestingServer;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.junit.AfterClass;
@@ -45,19 +46,23 @@ import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
-import scala.concurrent.duration.FiniteDuration;
 
 import java.io.File;
 import java.util.Arrays;
 import java.util.concurrent.TimeUnit;
 
+import scala.concurrent.duration.FiniteDuration;
+
+/**
+ * Tests that verify correct HA behavior.
+ */
 public class YARNHighAvailabilityITCase extends YarnTestBase {
 
-	protected static TestingServer zkServer;
+	private static TestingServer zkServer;
 
-	protected static ActorSystem actorSystem;
+	private static ActorSystem actorSystem;
 
-	protected static final int numberApplicationAttempts = 3;
+	private static final int numberApplicationAttempts = 3;
 
 	@Rule
 	public TemporaryFolder temp = new TemporaryFolder();
@@ -74,15 +79,15 @@ public class YARNHighAvailabilityITCase extends YarnTestBase {
 			Assert.fail("Could not start ZooKeeper testing cluster.");
 		}
 
-		yarnConfiguration.set(YarnTestBase.TEST_CLUSTER_NAME_KEY, "flink-yarn-tests-ha");
-		yarnConfiguration.set(YarnConfiguration.RM_AM_MAX_ATTEMPTS, "" + numberApplicationAttempts);
+		YARN_CONFIGURATION.set(YarnTestBase.TEST_CLUSTER_NAME_KEY, "flink-yarn-tests-ha");
+		YARN_CONFIGURATION.set(YarnConfiguration.RM_AM_MAX_ATTEMPTS, "" + numberApplicationAttempts);
 
-		startYARNWithConfig(yarnConfiguration);
+		startYARNWithConfig(YARN_CONFIGURATION);
 	}
 
 	@AfterClass
 	public static void teardown() throws Exception {
-		if(zkServer != null) {
+		if (zkServer != null) {
 			zkServer.stop();
 		}
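
The yarnConfiguration -> YARN_CONFIGURATION renames running through this and
the following files (together with ORIGINAL_STDOUT/ORIGINAL_STDERR and the
WAIT_TIME -> waitTime change in YARNSessionFIFOITCase) track the checkstyle
naming rules: static final fields are constants in upper snake case, everything
else stays camelCase. The declarations themselves are outside these hunks, but
the shape is presumably:

	// constant: static final, upper snake case
	protected static final YarnConfiguration YARN_CONFIGURATION = new YarnConfiguration();

	// local variable: camelCase, even when final
	final int waitTime = 15;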
 

http://git-wip-us.apache.org/repos/asf/flink/blob/0e69dd5c/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionCapacitySchedulerITCase.java
----------------------------------------------------------------------
diff --git a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionCapacitySchedulerITCase.java b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionCapacitySchedulerITCase.java
index 2a3b6c6..b290756 100644
--- a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionCapacitySchedulerITCase.java
+++ b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionCapacitySchedulerITCase.java
@@ -15,14 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.flink.yarn;
 
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ArrayNode;
-import com.google.common.base.Joiner;
-import com.google.common.collect.Sets;
-import org.apache.commons.io.FileUtils;
 import org.apache.flink.configuration.ConfigConstants;
 import org.apache.flink.configuration.GlobalConfiguration;
 import org.apache.flink.runtime.client.JobClient;
@@ -30,6 +25,13 @@ import org.apache.flink.runtime.webmonitor.WebMonitorUtils;
 import org.apache.flink.test.testdata.WordCountData;
 import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.yarn.cli.FlinkYarnSessionCli;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.google.common.base.Joiner;
+import com.google.common.collect.Sets;
+import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -56,7 +58,14 @@ import java.io.FilenameFilter;
 import java.io.IOException;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
-import java.util.*;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.EnumSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.ConcurrentMap;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -64,7 +73,6 @@ import java.util.regex.Pattern;
 import static org.apache.flink.yarn.UtilsTest.addTestAppender;
 import static org.apache.flink.yarn.UtilsTest.checkForLogString;
 
-
 /**
  * This test starts a MiniYARNCluster with a CapacityScheduler.
  * It has, by default, a queue called "default". The configuration here adds another queue: "qa-team".
@@ -74,12 +82,12 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 
 	@BeforeClass
 	public static void setup() {
-		yarnConfiguration.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class, ResourceScheduler.class);
-		yarnConfiguration.set("yarn.scheduler.capacity.root.queues", "default,qa-team");
-		yarnConfiguration.setInt("yarn.scheduler.capacity.root.default.capacity", 40);
-		yarnConfiguration.setInt("yarn.scheduler.capacity.root.qa-team.capacity", 60);
-		yarnConfiguration.set(YarnTestBase.TEST_CLUSTER_NAME_KEY, "flink-yarn-tests-capacityscheduler");
-		startYARNWithConfig(yarnConfiguration);
+		YARN_CONFIGURATION.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class, ResourceScheduler.class);
+		YARN_CONFIGURATION.set("yarn.scheduler.capacity.root.queues", "default,qa-team");
+		YARN_CONFIGURATION.setInt("yarn.scheduler.capacity.root.default.capacity", 40);
+		YARN_CONFIGURATION.setInt("yarn.scheduler.capacity.root.qa-team.capacity", 60);
+		YARN_CONFIGURATION.set(YarnTestBase.TEST_CLUSTER_NAME_KEY, "flink-yarn-tests-capacityscheduler");
+		startYARNWithConfig(YARN_CONFIGURATION);
 	}
 
 	/**
@@ -99,7 +107,7 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 	/**
 	 * Test per-job yarn cluster
 	 *
-	 * This also tests the prefixed CliFrontend options for the YARN case
+	 * <p>This also tests the prefixed CliFrontend options for the YARN case.
 	 * We also test if the requested parallelism of 2 is passed through.
 	 * The parallelism is requested at the YARN client (-ys).
 	 */
@@ -123,11 +131,10 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 		LOG.info("Finished perJobYarnCluster()");
 	}
 
-
 	/**
 	 * Test TaskManager failure and also if the vcores are set correctly (see issue FLINK-2213).
 	 */
-	@Test(timeout=100000) // timeout after 100 seconds
+	@Test(timeout = 100000) // timeout after 100 seconds
 	public void testTaskManagerFailure() {
 		LOG.info("Starting testTaskManagerFailure()");
 		Runner runner = startWithArgs(new String[]{"-j", flinkUberjar.getAbsolutePath(), "-t", flinkLibFolder.getAbsolutePath(),
@@ -149,7 +156,7 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 		YarnClient yc = null;
 		try {
 			yc = YarnClient.createYarnClient();
-			yc.init(yarnConfiguration);
+			yc.init(YARN_CONFIGURATION);
 			yc.start();
 
 			List<ApplicationReport> apps = yc.getApplications(EnumSet.of(YarnApplicationState.RUNNING));
@@ -157,10 +164,10 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 			ApplicationReport app = apps.get(0);
 			Assert.assertEquals("customName", app.getName());
 			String url = app.getTrackingUrl();
-			if(!url.endsWith("/")) {
+			if (!url.endsWith("/")) {
 				url += "/";
 			}
-			if(!url.startsWith("http://")) {
+			if (!url.startsWith("http://")) {
 				url = "http://" + url;
 			}
 			LOG.info("Got application URL from YARN {}", url);
@@ -188,7 +195,7 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 			Matcher matches = p.matcher(oC);
 			String hostname = null;
 			String port = null;
-			while(matches.find()) {
+			while (matches.find()) {
 				hostname = matches.group(1).toLowerCase();
 				port = matches.group(2);
 			}
@@ -204,8 +211,8 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 			Assert.assertTrue(logs.contains("Starting YARN ApplicationMaster"));
 			Assert.assertTrue(logs.contains("Starting JobManager"));
 			Assert.assertTrue(logs.contains("Starting JobManager Web Frontend"));
-		} catch(Throwable e) {
-			LOG.warn("Error while running test",e);
+		} catch (Throwable e) {
+			LOG.warn("Error while running test", e);
 			Assert.fail(e.getMessage());
 		}
 
@@ -222,15 +229,15 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 			LOG.warn("Unable to get curr user", e);
 			Assert.fail();
 		}
-		for(int nmId = 0; nmId < NUM_NODEMANAGERS; nmId++) {
+		for (int nmId = 0; nmId < NUM_NODEMANAGERS; nmId++) {
 			NodeManager nm = yarnCluster.getNodeManager(nmId);
 			ConcurrentMap<ContainerId, Container> containers = nm.getNMContext().getContainers();
-			for(Map.Entry<ContainerId, Container> entry : containers.entrySet()) {
+			for (Map.Entry<ContainerId, Container> entry : containers.entrySet()) {
 				String command = Joiner.on(" ").join(entry.getValue().getLaunchContext().getCommands());
-				if(command.contains(YarnTaskManager.class.getSimpleName())) {
+				if (command.contains(YarnTaskManager.class.getSimpleName())) {
 					taskManagerContainer = entry.getKey();
 					nodeManager = nm;
-					nmIdent = new NMTokenIdentifier(taskManagerContainer.getApplicationAttemptId(), null, "",0);
+					nmIdent = new NMTokenIdentifier(taskManagerContainer.getApplicationAttemptId(), null, "", 0);
 					// allow myself to do stuff with the container
 					// remoteUgi.addCredentials(entry.getValue().getCredentials());
 					remoteUgi.addTokenIdentifier(nmIdent);
@@ -252,7 +259,7 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 			nodeManager.getNMContext().getContainerManager().stopContainers(scr);
 		} catch (Throwable e) {
 			LOG.warn("Error stopping container", e);
-			Assert.fail("Error stopping container: "+e.getMessage());
+			Assert.fail("Error stopping container: " + e.getMessage());
 		}
 
 		// stateful termination check:
@@ -270,7 +277,6 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 			sleep(1000);
 		} while(!ok);
 
-
 		// send "stop" command to command line interface
 		runner.sendStop();
 		// wait for the thread to stop
@@ -282,8 +288,8 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 		LOG.warn("stopped");
 
 		// ----------- Send output to logger
-		System.setOut(originalStdout);
-		System.setErr(originalStderr);
+		System.setOut(ORIGINAL_STDOUT);
+		System.setErr(ORIGINAL_STDERR);
 		String oC = outContent.toString();
 		String eC = errContent.toString();
 		LOG.info("Sending stdout content through logger: \n\n{}\n\n", oC);
@@ -354,7 +360,7 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 	/**
 	 * Test a fire-and-forget job submission to a YARN cluster.
 	 */
-	@Test(timeout=60000)
+	@Test(timeout = 60000)
 	public void testDetachedPerJobYarnCluster() {
 		LOG.info("Starting testDetachedPerJobYarnCluster()");
 
@@ -372,7 +378,7 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 	/**
 	 * Test a fire-and-forget job submission to a YARN cluster.
 	 */
-	@Test(timeout=60000)
+	@Test(timeout = 60000)
 	public void testDetachedPerJobYarnClusterWithStreamingJob() {
 		LOG.info("Starting testDetachedPerJobYarnClusterWithStreamingJob()");
 
@@ -388,25 +394,25 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 
 	private void testDetachedPerJobYarnClusterInternal(String job) {
 		YarnClient yc = YarnClient.createYarnClient();
-		yc.init(yarnConfiguration);
+		yc.init(YARN_CONFIGURATION);
 		yc.start();
 
 		// get temporary folder for writing output of wordcount example
 		File tmpOutFolder = null;
-		try{
+		try {
 			tmpOutFolder = tmp.newFolder();
 		}
-		catch(IOException e) {
+		catch (IOException e) {
 			throw new RuntimeException(e);
 		}
 
 		// get temporary file for reading input data for wordcount example
 		File tmpInFile;
-		try{
+		try {
 			tmpInFile = tmp.newFile();
 			FileUtils.writeStringToFile(tmpInFile, WordCountData.TEXT);
 		}
-		catch(IOException e) {
+		catch (IOException e) {
 			throw new RuntimeException(e);
 		}
 
@@ -450,7 +456,7 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 
 				LOG.info("waiting for the job with appId {} to finish", tmpAppId);
 				// wait until the app has finished
-				while(yc.getApplications(EnumSet.of(YarnApplicationState.RUNNING)).size() > 0) {
+				while (yc.getApplications(EnumSet.of(YarnApplicationState.RUNNING)).size() > 0) {
 					sleep(500);
 				}
 			} else {
@@ -459,7 +465,7 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 				Collections.sort(apps, new Comparator<ApplicationReport>() {
 					@Override
 					public int compare(ApplicationReport o1, ApplicationReport o2) {
-						return o1.getApplicationId().compareTo(o2.getApplicationId())*-1;
+						return o1.getApplicationId().compareTo(o2.getApplicationId()) * -1;
 					}
 				});
 				tmpAppId = apps.get(0).getApplicationId();
@@ -471,23 +477,20 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 			// check the output files.
 			File[] listOfOutputFiles = tmpOutFolder.listFiles();
 
-
 			Assert.assertNotNull("Taskmanager output not found", listOfOutputFiles);
-			LOG.info("The job has finished. TaskManager output files found in {}", tmpOutFolder );
+			LOG.info("The job has finished. TaskManager output files found in {}", tmpOutFolder);
 
 			// read all output files in output folder to one output string
 			String content = "";
-			for(File f:listOfOutputFiles)
-			{
-				if(f.isFile())
-				{
+			for (File f : listOfOutputFiles) {
+				if (f.isFile()) {
 					content += FileUtils.readFileToString(f) + "\n";
 				}
 			}
 			//String content = FileUtils.readFileToString(taskmanagerOut);
 			// check for some of the wordcount outputs.
-			Assert.assertTrue("Expected string 'da 5' or '(all,2)' not found in string '"+content+"'", content.contains("da 5") || content.contains("(da,5)") || content.contains("(all,2)"));
-			Assert.assertTrue("Expected string 'der 29' or '(mind,1)' not found in string'"+content+"'",content.contains("der 29") || content.contains("(der,29)") || content.contains("(mind,1)"));
+			Assert.assertTrue("Expected string 'da 5' or '(all,2)' not found in string '" + content + "'", content.contains("da 5") || content.contains("(da,5)") || content.contains("(all,2)"));
+			Assert.assertTrue("Expected string 'der 29' or '(mind,1)' not found in string'" + content + "'", content.contains("der 29") || content.contains("(der,29)") || content.contains("(mind,1)"));
 
 			// check if the heap size for the TaskManager was set correctly
 			File jobmanagerLog = YarnTestBase.findFile("..", new FilenameFilter() {
@@ -500,11 +503,11 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 			content = FileUtils.readFileToString(jobmanagerLog);
 			// TM was started with 1024 but we cut off 50% (NOT THE DEFAULT VALUE)
 			String expected = "Starting TaskManagers with command: $JAVA_HOME/bin/java -Xms424m -Xmx424m";
-			Assert.assertTrue("Expected string '" + expected + "' not found in JobManager log: '"+jobmanagerLog+"'",
+			Assert.assertTrue("Expected string '" + expected + "' not found in JobManager log: '" + jobmanagerLog + "'",
 				content.contains(expected));
 			expected = " (2/2) (attempt #0) to ";
 			Assert.assertTrue("Expected string '" + expected + "' not found in JobManager log." +
-					"This string checks that the job has been started with a parallelism of 2. Log contents: '"+jobmanagerLog+"'",
+					"This string checks that the job has been started with a parallelism of 2. Log contents: '" + jobmanagerLog + "'",
 				content.contains(expected));
 
 			// make sure the detached app is really finished.
@@ -514,10 +517,10 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 				sleep(500);
 				rep = yc.getApplicationReport(id);
 				LOG.info("Got report {}", rep);
-			} while(rep.getYarnApplicationState() == YarnApplicationState.RUNNING);
+			} while (rep.getYarnApplicationState() == YarnApplicationState.RUNNING);
 
 			verifyApplicationTags(rep);
-		} catch(Throwable t) {
+		} catch (Throwable t) {
 			LOG.warn("Error while detached yarn session was running", t);
 			Assert.fail(t.getMessage());
 		} finally {
@@ -533,7 +536,7 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 
 			try {
 				File yarnPropertiesFile = FlinkYarnSessionCli.getYarnPropertiesLocation(GlobalConfiguration.loadConfiguration());
-				if(yarnPropertiesFile.exists()) {
+				if (yarnPropertiesFile.exists()) {
 					LOG.info("testDetachedPerJobYarnClusterInternal: Cleaning up temporary Yarn address reference: {}", yarnPropertiesFile.getAbsolutePath());
 					yarnPropertiesFile.delete();
 				}
@@ -547,7 +550,7 @@ public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
 	/**
 	 * Ensures that the YARN application tags were set properly.
 	 *
-	 * Since YARN application tags were only added in Hadoop 2.4, but Flink still supports Hadoop 2.3, reflection is
+	 * <p>Since YARN application tags were only added in Hadoop 2.4, but Flink still supports Hadoop 2.3, reflection is
 	 * required to invoke the methods. If the method does not exist, this test passes.
 	 */
 	private void verifyApplicationTags(final ApplicationReport report) throws InvocationTargetException,
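
As the javadoc above explains, ApplicationReport#getApplicationTags only exists
on Hadoop >= 2.4, so the verification has to go through reflection and pass
when the method is missing. A sketch of that pattern (the method body itself is
not part of this hunk; the method name is taken from the Hadoop 2.4 API):

	Set<String> tags;
	try {
		Method getApplicationTags = report.getClass().getMethod("getApplicationTags");
		@SuppressWarnings("unchecked")
		Set<String> result = (Set<String>) getApplicationTags.invoke(report);
		tags = result;
	} catch (NoSuchMethodException e) {
		// Hadoop 2.3: application tags do not exist, nothing to verify.
		return;
	}
	// ... assertions on the recovered tags would follow here.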

http://git-wip-us.apache.org/repos/asf/flink/blob/0e69dd5c/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionFIFOITCase.java
----------------------------------------------------------------------
diff --git a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionFIFOITCase.java b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionFIFOITCase.java
index f45fe82..64fc5d1 100644
--- a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionFIFOITCase.java
+++ b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionFIFOITCase.java
@@ -20,9 +20,9 @@ package org.apache.flink.yarn;
 
 import org.apache.flink.client.program.ClusterClient;
 import org.apache.flink.configuration.ConfigConstants;
-import org.apache.flink.yarn.cli.FlinkYarnSessionCli;
 import org.apache.flink.configuration.GlobalConfiguration;
 import org.apache.flink.runtime.clusterframework.messages.GetClusterStatusResponse;
+import org.apache.flink.yarn.cli.FlinkYarnSessionCli;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -32,15 +32,12 @@ import org.apache.hadoop.yarn.client.api.YarnClient;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
-
 import org.apache.log4j.Level;
-
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -52,7 +49,6 @@ import java.util.List;
 import static org.apache.flink.yarn.UtilsTest.addTestAppender;
 import static org.apache.flink.yarn.UtilsTest.checkForLogString;
 
-
 /**
  * This test starts a MiniYARNCluster with a FIFO scheduler.
  * There are no queues for that scheduler.
@@ -65,11 +61,11 @@ public class YARNSessionFIFOITCase extends YarnTestBase {
 	 */
 	@BeforeClass
 	public static void setup() {
-		yarnConfiguration.setClass(YarnConfiguration.RM_SCHEDULER, FifoScheduler.class, ResourceScheduler.class);
-		yarnConfiguration.setInt(YarnConfiguration.NM_PMEM_MB, 768);
-		yarnConfiguration.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 512);
-		yarnConfiguration.set(YarnTestBase.TEST_CLUSTER_NAME_KEY, "flink-yarn-tests-fifo");
-		startYARNWithConfig(yarnConfiguration);
+		YARN_CONFIGURATION.setClass(YarnConfiguration.RM_SCHEDULER, FifoScheduler.class, ResourceScheduler.class);
+		YARN_CONFIGURATION.setInt(YarnConfiguration.NM_PMEM_MB, 768);
+		YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 512);
+		YARN_CONFIGURATION.set(YarnTestBase.TEST_CLUSTER_NAME_KEY, "flink-yarn-tests-fifo");
+		startYARNWithConfig(YARN_CONFIGURATION);
 	}
 
 	@After
@@ -80,7 +76,7 @@ public class YARNSessionFIFOITCase extends YarnTestBase {
 	/**
 	 * Test regular operation, including command line parameter parsing.
 	 */
-	@Test(timeout=60000) // timeout after a minute.
+	@Test(timeout = 60000) // timeout after a minute.
 	public void testDetachedMode() throws InterruptedException {
 		LOG.info("Starting testDetachedMode()");
 		addTestAppender(FlinkYarnSessionCli.class, Level.INFO);
@@ -100,7 +96,7 @@ public class YARNSessionFIFOITCase extends YarnTestBase {
 
 		LOG.info("Waiting until two containers are running");
 		// wait until two containers are running
-		while(getRunningContainers() < 2) {
+		while (getRunningContainers() < 2) {
 			sleep(500);
 		}
 
@@ -111,7 +107,7 @@ public class YARNSessionFIFOITCase extends YarnTestBase {
 		// kill application "externally".
 		try {
 			YarnClient yc = YarnClient.createYarnClient();
-			yc.init(yarnConfiguration);
+			yc.init(YARN_CONFIGURATION);
 			yc.start();
 			List<ApplicationReport> apps = yc.getApplications(EnumSet.of(YarnApplicationState.RUNNING));
 			Assert.assertEquals(1, apps.size()); // Only one running
@@ -121,10 +117,10 @@ public class YARNSessionFIFOITCase extends YarnTestBase {
 			ApplicationId id = app.getApplicationId();
 			yc.killApplication(id);
 
-			while(yc.getApplications(EnumSet.of(YarnApplicationState.KILLED)).size() == 0) {
+			while (yc.getApplications(EnumSet.of(YarnApplicationState.KILLED)).size() == 0) {
 				sleep(500);
 			}
-		} catch(Throwable t) {
+		} catch (Throwable t) {
 			LOG.warn("Killing failed", t);
 			Assert.fail();
 		} finally {
@@ -140,7 +136,7 @@ public class YARNSessionFIFOITCase extends YarnTestBase {
 
 			try {
 				File yarnPropertiesFile = FlinkYarnSessionCli.getYarnPropertiesLocation(GlobalConfiguration.loadConfiguration());
-				if(yarnPropertiesFile.exists()) {
+				if (yarnPropertiesFile.exists()) {
 					LOG.info("testDetachedPerJobYarnClusterInternal: Cleaning up temporary Yarn address reference: {}", yarnPropertiesFile.getAbsolutePath());
 					yarnPropertiesFile.delete();
 				}
@@ -156,26 +152,25 @@ public class YARNSessionFIFOITCase extends YarnTestBase {
 	/**
 	 * Test querying the YARN cluster.
 	 *
-	 * This test validates through 666*2 cores in the "cluster".
+	 * <p>This test validates through 666*2 cores in the "cluster".
 	 */
 	@Test
 	public void testQueryCluster() {
 		LOG.info("Starting testQueryCluster()");
-		runWithArgs(new String[] {"-q"}, "Summary: totalMemory 8192 totalCores 1332",null, RunTypes.YARN_SESSION, 0); // we have 666*2 cores.
+		runWithArgs(new String[] {"-q"}, "Summary: totalMemory 8192 totalCores 1332", null, RunTypes.YARN_SESSION, 0); // we have 666*2 cores.
 		LOG.info("Finished testQueryCluster()");
 	}
 
-
 	/**
 	 * The test cluster has the following resources:
 	 * - 2 Nodes with 4096 MB each.
 	 * - RM_SCHEDULER_MINIMUM_ALLOCATION_MB is 512
 	 *
-	 * We allocate:
+	 * <p>We allocate:
 	 * 1 JobManager with 256 MB (will be automatically upgraded to 512 due to min alloc mb)
 	 * 5 TaskManagers with 1585 MB
 	 *
-	 * user sees a total request of: 8181 MB (fits)
+	 * <p>user sees a total request of: 8181 MB (fits)
 	 * system sees a total request of: 8437 (doesn't fit due to min alloc mb)
 	 */
 	@Ignore("The test is too resource consuming (8.5 GB of memory)")
@@ -196,15 +191,15 @@ public class YARNSessionFIFOITCase extends YarnTestBase {
 	 * - 2 Nodes with 4096 MB each.
 	 * - RM_SCHEDULER_MINIMUM_ALLOCATION_MB is 512
 	 *
-	 * We allocate:
+	 * <p>We allocate:
 	 * 1 JobManager with 256 MB (will be automatically upgraded to 512 due to min alloc mb)
 	 * 2 TaskManagers with 3840 MB
 	 *
-	 * the user sees a total request of: 7936 MB (fits)
+	 * <p>the user sees a total request of: 7936 MB (fits)
 	 * the system sees a request of: 8192 MB (fits)
 	 * HOWEVER: one machine is going to need 3840 + 512 = 4352 MB, which doesn't fit.
 	 *
-	 * --> check if the system properly rejects allocating this session.
+	 * <p>--> check if the system properly rejects allocating this session.
 	 */
 	@Ignore("The test is too resource consuming (8 GB of memory)")
 	@Test
@@ -221,11 +216,11 @@ public class YARNSessionFIFOITCase extends YarnTestBase {
 	}
 
 	/**
-	 * Test the YARN Java API
+	 * Test the YARN Java API.
 	 */
 	@Test
 	public void testJavaAPI() throws Exception {
-		final int WAIT_TIME = 15;
+		final int waitTime = 15;
 		LOG.info("Starting testJavaAPI()");
 
 		AbstractYarnClusterDescriptor flinkYarnClient = new YarnClusterDescriptor();
@@ -246,23 +241,23 @@ public class YARNSessionFIFOITCase extends YarnTestBase {
 			yarnCluster = flinkYarnClient.deploy();
 		} catch (Exception e) {
 			LOG.warn("Failing test", e);
-			Assert.fail("Error while deploying YARN cluster: "+e.getMessage());
+			Assert.fail("Error while deploying YARN cluster: " + e.getMessage());
 		}
 		GetClusterStatusResponse expectedStatus = new GetClusterStatusResponse(1, 1);
-		for(int second = 0; second < WAIT_TIME * 2; second++) { // run "forever"
+		for (int second = 0; second < waitTime * 2; second++) { // run "forever"
 			try {
 				Thread.sleep(1000);
 			} catch (InterruptedException e) {
 				LOG.warn("Interrupted", e);
 			}
 			GetClusterStatusResponse status = yarnCluster.getClusterStatus();
-			if(status != null && status.equals(expectedStatus)) {
+			if (status != null && status.equals(expectedStatus)) {
 				LOG.info("ClusterClient reached status " + status);
 				break; // all good, cluster started
 			}
-			if(second > WAIT_TIME) {
+			if (second > waitTime) {
 				// we waited for 15 seconds. cluster didn't come up correctly
-				Assert.fail("The custer didn't start after " + WAIT_TIME + " seconds");
+				Assert.fail("The custer didn't start after " + waitTime + " seconds");
 			}
 		}
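
Spelling out the memory arithmetic behind the two @Ignore'd allocation tests
above (all figures from their javadocs; only the 256 MB JobManager request is
rounded up by the 512 MB minimum allocation):

	First test:   user view  256 + 5 * 1585 = 8181 MB  (<= 2 * 4096 = 8192, appears to fit)
	              YARN view  512 + 5 * 1585 = 8437 MB  (> 8192, must be rejected)

	Second test:  user view  256 + 2 * 3840 = 7936 MB  (fits)
	              YARN view  512 + 2 * 3840 = 8192 MB  (fits globally)
	              but one node must host 3840 + 512 = 4352 MB > 4096 MB,
	              so the session has to be rejected per node.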
 

http://git-wip-us.apache.org/repos/asf/flink/blob/0e69dd5c/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionFIFOSecuredITCase.java
----------------------------------------------------------------------
diff --git a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionFIFOSecuredITCase.java b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionFIFOSecuredITCase.java
index d3558a9..c9f120b 100644
--- a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionFIFOSecuredITCase.java
+++ b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionFIFOSecuredITCase.java
@@ -23,6 +23,7 @@ import org.apache.flink.configuration.SecurityOptions;
 import org.apache.flink.runtime.security.SecurityUtils;
 import org.apache.flink.test.util.SecureTestEnvironment;
 import org.apache.flink.test.util.TestingSecurityContext;
+
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
@@ -33,6 +34,9 @@ import org.slf4j.LoggerFactory;
 
 import java.util.concurrent.Callable;
 
+/**
+ * An extension of the {@link YARNSessionFIFOITCase} that runs the tests in a secured YARN cluster.
+ */
 public class YARNSessionFIFOSecuredITCase extends YARNSessionFIFOITCase {
 
 	protected static final Logger LOG = LoggerFactory.getLogger(YARNSessionFIFOSecuredITCase.class);
@@ -42,14 +46,14 @@ public class YARNSessionFIFOSecuredITCase extends YARNSessionFIFOITCase {
 
 		LOG.info("starting secure cluster environment for testing");
 
-		yarnConfiguration.setClass(YarnConfiguration.RM_SCHEDULER, FifoScheduler.class, ResourceScheduler.class);
-		yarnConfiguration.setInt(YarnConfiguration.NM_PMEM_MB, 768);
-		yarnConfiguration.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 512);
-		yarnConfiguration.set(YarnTestBase.TEST_CLUSTER_NAME_KEY, "flink-yarn-tests-fifo-secured");
+		YARN_CONFIGURATION.setClass(YarnConfiguration.RM_SCHEDULER, FifoScheduler.class, ResourceScheduler.class);
+		YARN_CONFIGURATION.setInt(YarnConfiguration.NM_PMEM_MB, 768);
+		YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 512);
+		YARN_CONFIGURATION.set(YarnTestBase.TEST_CLUSTER_NAME_KEY, "flink-yarn-tests-fifo-secured");
 
 		SecureTestEnvironment.prepare(tmp);
 
-		populateYarnSecureConfigurations(yarnConfiguration, SecureTestEnvironment.getHadoopServicePrincipal(),
+		populateYarnSecureConfigurations(YARN_CONFIGURATION, SecureTestEnvironment.getHadoopServicePrincipal(),
 				SecureTestEnvironment.getTestKeytab());
 
 		Configuration flinkConfig = new Configuration();
@@ -59,20 +63,20 @@ public class YARNSessionFIFOSecuredITCase extends YARNSessionFIFOITCase {
 				SecureTestEnvironment.getHadoopServicePrincipal());
 
 		SecurityUtils.SecurityConfiguration ctx = new SecurityUtils.SecurityConfiguration(flinkConfig,
-				yarnConfiguration);
+			YARN_CONFIGURATION);
 		try {
 			TestingSecurityContext.install(ctx, SecureTestEnvironment.getClientSecurityConfigurationMap());
 
 			SecurityUtils.getInstalledContext().runSecured(new Callable<Object>() {
 				@Override
 				public Integer call() {
-					startYARNSecureMode(yarnConfiguration, SecureTestEnvironment.getHadoopServicePrincipal(),
+					startYARNSecureMode(YARN_CONFIGURATION, SecureTestEnvironment.getHadoopServicePrincipal(),
 							SecureTestEnvironment.getTestKeytab());
 					return null;
 				}
 			});
 
-		} catch(Exception e) {
+		} catch (Exception e) {
 			throw new RuntimeException("Exception occurred while setting up secure test context.", e);
 		}
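
Stripped of the test scaffolding, the secured setup above boils down to one
call chain (condensed from this hunk, with exception handling as in the full
version):

	SecurityUtils.SecurityConfiguration ctx =
		new SecurityUtils.SecurityConfiguration(flinkConfig, YARN_CONFIGURATION);
	TestingSecurityContext.install(ctx, SecureTestEnvironment.getClientSecurityConfigurationMap());

	SecurityUtils.getInstalledContext().runSecured(new Callable<Object>() {
		@Override
		public Integer call() {
			startYARNSecureMode(YARN_CONFIGURATION,
				SecureTestEnvironment.getHadoopServicePrincipal(),
				SecureTestEnvironment.getTestKeytab());
			return null;
		}
	});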
 

http://git-wip-us.apache.org/repos/asf/flink/blob/0e69dd5c/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YarnClusterDescriptorTest.java
----------------------------------------------------------------------
diff --git a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YarnClusterDescriptorTest.java b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YarnClusterDescriptorTest.java
index 5cf3ddc..c40a300 100644
--- a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YarnClusterDescriptorTest.java
+++ b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YarnClusterDescriptorTest.java
@@ -15,11 +15,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.flink.yarn;
 
 import org.apache.flink.configuration.ConfigConstants;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.test.util.TestBaseUtils;
+
 import org.apache.hadoop.fs.Path;
 import org.junit.Assert;
 import org.junit.Rule;
@@ -34,13 +36,16 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+/**
+ * Tests for the YarnClusterDescriptor.
+ */
 public class YarnClusterDescriptorTest {
 
 	@Rule
 	public TemporaryFolder temporaryFolder = new TemporaryFolder();
 
 	/**
-	 * Tests to ship a lib folder through the {@code YarnClusterDescriptor.addShipFiles}
+	 * Tests to ship a lib folder through the {@code YarnClusterDescriptor.addShipFiles}.
 	 */
 	@Test
 	public void testExplicitLibShipping() throws Exception {
@@ -77,7 +82,7 @@ public class YarnClusterDescriptorTest {
 	}
 
 	/**
-	 * Tests to ship a lib folder through the {@code ConfigConstants.ENV_FLINK_LIB_DIR}
+	 * Tests to ship a lib folder through the {@code ConfigConstants.ENV_FLINK_LIB_DIR}.
 	 */
 	@Test
 	public void testEnvironmentLibShipping() throws Exception {

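The two tests whose javadoc is punctuated above exercise complementary
shipping paths: explicit registration via addShipFiles versus implicit pickup
of the directory named by ConfigConstants.ENV_FLINK_LIB_DIR. The following
self-contained sketch of that fallback logic is hypothetical; the class and
method bodies are invented for illustration, and the environment variable
name "FLINK_LIB_DIR" is assumed to match the constant:

    import java.io.File;
    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    public class ShipFilesSketch {

        private final List<File> shipFiles = new ArrayList<>();

        // Explicit path, as in testExplicitLibShipping: the caller registers
        // files to ship to the YARN cluster.
        public void addShipFiles(List<File> files) {
            shipFiles.addAll(files);
        }

        // Implicit path, as in testEnvironmentLibShipping: with nothing
        // registered, fall back to the lib folder named by the environment.
        public List<File> effectiveShipFiles() {
            if (shipFiles.isEmpty()) {
                String libDir = System.getenv("FLINK_LIB_DIR"); // assumed name
                if (libDir != null) {
                    return Collections.singletonList(new File(libDir));
                }
            }
            return shipFiles;
        }
    }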
http://git-wip-us.apache.org/repos/asf/flink/blob/0e69dd5c/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YarnTestBase.java
----------------------------------------------------------------------
diff --git a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YarnTestBase.java b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YarnTestBase.java
index ca8a0da..68a1509 100644
--- a/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YarnTestBase.java
+++ b/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YarnTestBase.java
@@ -18,9 +18,6 @@
 
 package org.apache.flink.yarn;
 
-import akka.actor.Identify;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang3.StringUtils;
 import org.apache.flink.client.CliFrontend;
 import org.apache.flink.client.cli.CommandLineOptions;
 import org.apache.flink.client.program.ClusterClient;
@@ -28,9 +25,13 @@ import org.apache.flink.client.program.PackagedProgram;
 import org.apache.flink.configuration.ConfigConstants;
 import org.apache.flink.configuration.SecurityOptions;
 import org.apache.flink.runtime.instance.ActorGateway;
-import org.apache.flink.yarn.cli.FlinkYarnSessionCli;
 import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.util.TestLogger;
+import org.apache.flink.yarn.cli.FlinkYarnSessionCli;
+
+import akka.actor.Identify;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileUtil;
@@ -57,9 +58,8 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.slf4j.Marker;
 import org.slf4j.MarkerFactory;
-import scala.concurrent.Await;
-import scala.concurrent.duration.FiniteDuration;
 
+import java.io.BufferedWriter;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.FileNotFoundException;
@@ -67,7 +67,6 @@ import java.io.FileWriter;
 import java.io.FilenameFilter;
 import java.io.IOException;
 import java.io.PrintStream;
-import java.io.BufferedWriter;
 import java.io.PrintWriter;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -78,34 +77,36 @@ import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Pattern;
 
+import scala.concurrent.Await;
+import scala.concurrent.duration.FiniteDuration;
 
 /**
  * This base class allows tests to use the MiniYARNCluster.
  * The cluster is re-used for all tests.
  *
- * This class is located in a different package which is build after flink-dist. This way,
+ * <p>This class is located in a different package which is built after flink-dist. This way,
  * we can use the YARN uberjar of flink to start a Flink YARN session.
  *
- * The test is not thread-safe. Parallel execution of tests is not possible!
+ * <p>The test is not thread-safe. Parallel execution of tests is not possible!
  */
 public abstract class YarnTestBase extends TestLogger {
 	private static final Logger LOG = LoggerFactory.getLogger(YarnTestBase.class);
 
-	protected final static PrintStream originalStdout = System.out;
-	protected final static PrintStream originalStderr = System.err;
+	protected static final PrintStream ORIGINAL_STDOUT = System.out;
+	protected static final PrintStream ORIGINAL_STDERR = System.err;
 
-	protected static String TEST_CLUSTER_NAME_KEY = "flink-yarn-minicluster-name";
+	protected static final String TEST_CLUSTER_NAME_KEY = "flink-yarn-minicluster-name";
 
-	protected final static int NUM_NODEMANAGERS = 2;
+	protected static final int NUM_NODEMANAGERS = 2;
 
 	/** The tests are scanning for these strings in the final output. */
-	protected final static String[] PROHIBITED_STRINGS = {
+	protected static final String[] PROHIBITED_STRINGS = {
 			"Exception", // we don't want any exceptions to happen
 			"Started SelectChannelConnector@0.0.0.0:8081" // Jetty should start on a random port in YARN mode.
 	};
 
-	/** These strings are white-listed, overriding teh prohibited strings */
-	protected final static String[] WHITELISTED_STRINGS = {
+	/** These strings are white-listed, overriding the prohibited strings. */
+	protected static final String[] WHITELISTED_STRINGS = {
 			"akka.remote.RemoteTransportExceptionNoStackTrace",
 			// workaround for annoying InterruptedException logging:
 		    // https://issues.apache.org/jira/browse/YARN-1022
@@ -119,11 +120,11 @@ public abstract class YarnTestBase extends TestLogger {
 	protected static MiniYARNCluster yarnCluster = null;
 
 	/**
-	 * Uberjar (fat jar) file of Flink
+	 * Uberjar (fat jar) file of Flink.
 	 */
 	protected static File flinkUberjar;
 
-	protected static final Configuration yarnConfiguration;
+	protected static final Configuration YARN_CONFIGURATION;
 
 	/**
 	 * lib/ folder of the flink distribution.
@@ -131,27 +132,26 @@ public abstract class YarnTestBase extends TestLogger {
 	protected static File flinkLibFolder;
 
 	/**
-	 * Temporary folder where Flink configurations will be kept for secure run
+	 * Temporary folder where Flink configurations will be kept for secure run.
 	 */
 	protected static File tempConfPathForSecureRun = null;
 
 	static {
-		yarnConfiguration = new YarnConfiguration();
-		yarnConfiguration.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 512);
-		yarnConfiguration.setInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB, 4096); // 4096 is the available memory anyways
-		yarnConfiguration.setBoolean(YarnConfiguration.YARN_MINICLUSTER_FIXED_PORTS, true);
-		yarnConfiguration.setBoolean(YarnConfiguration.RM_SCHEDULER_INCLUDE_PORT_IN_NODE_NAME, true);
-		yarnConfiguration.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 2);
-		yarnConfiguration.setInt(YarnConfiguration.RM_MAX_COMPLETED_APPLICATIONS, 2);
-		yarnConfiguration.setInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES, 4);
-		yarnConfiguration.setInt(YarnConfiguration.DEBUG_NM_DELETE_DELAY_SEC, 3600);
-		yarnConfiguration.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, false);
-		yarnConfiguration.setInt(YarnConfiguration.NM_VCORES, 666); // memory is overwritten in the MiniYARNCluster.
+		YARN_CONFIGURATION = new YarnConfiguration();
+		YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 512);
+		YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB, 4096); // 4096 is the available memory anyways
+		YARN_CONFIGURATION.setBoolean(YarnConfiguration.YARN_MINICLUSTER_FIXED_PORTS, true);
+		YARN_CONFIGURATION.setBoolean(YarnConfiguration.RM_SCHEDULER_INCLUDE_PORT_IN_NODE_NAME, true);
+		YARN_CONFIGURATION.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 2);
+		YARN_CONFIGURATION.setInt(YarnConfiguration.RM_MAX_COMPLETED_APPLICATIONS, 2);
+		YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES, 4);
+		YARN_CONFIGURATION.setInt(YarnConfiguration.DEBUG_NM_DELETE_DELAY_SEC, 3600);
+		YARN_CONFIGURATION.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, false);
+		YARN_CONFIGURATION.setInt(YarnConfiguration.NM_VCORES, 666); // memory is overwritten in the MiniYARNCluster.
 		// so we have to change the number of cores for testing.
-		yarnConfiguration.setInt(YarnConfiguration.RM_AM_EXPIRY_INTERVAL_MS, 20000); // 20 seconds expiry (to ensure we properly heartbeat with YARN).
+		YARN_CONFIGURATION.setInt(YarnConfiguration.RM_AM_EXPIRY_INTERVAL_MS, 20000); // 20 seconds expiry (to ensure we properly heartbeat with YARN).
 	}
 
-
 	public static void populateYarnSecureConfigurations(Configuration conf, String principal, String keytab) {
 
 		conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
@@ -163,15 +163,15 @@ public abstract class YarnTestBase extends TestLogger {
 		conf.set(YarnConfiguration.NM_PRINCIPAL, principal);
 
 		conf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY, principal);
-		conf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY,keytab);
+		conf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY, keytab);
 		conf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_USER_NAME_KEY, principal);
-		conf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY,keytab);
+		conf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY, keytab);
 
-		conf.set("hadoop.security.auth_to_local","RULE:[1:$1] RULE:[2:$1]");
+		conf.set("hadoop.security.auth_to_local", "RULE:[1:$1] RULE:[2:$1]");
 	}
 
 	/**
-	 * Sleep a bit between the tests (we are re-using the YARN cluster for the tests)
+	 * Sleep a bit between the tests (we are re-using the YARN cluster for the tests).
 	 */
 	@After
 	public void sleep() {
@@ -185,37 +185,37 @@ public abstract class YarnTestBase extends TestLogger {
 	private YarnClient yarnClient = null;
 	@Before
 	public void checkClusterEmpty() throws IOException, YarnException {
-		if(yarnClient == null) {
+		if (yarnClient == null) {
 			yarnClient = YarnClient.createYarnClient();
-			yarnClient.init(yarnConfiguration);
+			yarnClient.init(YARN_CONFIGURATION);
 			yarnClient.start();
 		}
 
 		List<ApplicationReport> apps = yarnClient.getApplications();
-		for(ApplicationReport app : apps) {
-			if(app.getYarnApplicationState() != YarnApplicationState.FINISHED
+		for (ApplicationReport app : apps) {
+			if (app.getYarnApplicationState() != YarnApplicationState.FINISHED
 					&& app.getYarnApplicationState() != YarnApplicationState.KILLED
 					&& app.getYarnApplicationState() != YarnApplicationState.FAILED) {
 				Assert.fail("There is at least one application on the cluster that is not finished. " +
-						"App "+app.getApplicationId()+" is in state "+app.getYarnApplicationState());
+						"App " + app.getApplicationId() + " is in state " + app.getYarnApplicationState());
 			}
 		}
 	}
 
 	/**
-	 * Locate a file or directory
+	 * Locate a file or directory.
 	 */
 	public static File findFile(String startAt, FilenameFilter fnf) {
 		File root = new File(startAt);
 		String[] files = root.list();
-		if(files == null) {
+		if (files == null) {
 			return null;
 		}
-		for(String file : files) {
+		for (String file : files) {
 			File f = new File(startAt + File.separator + file);
-			if(f.isDirectory()) {
+			if (f.isDirectory()) {
 				File r = findFile(f.getAbsolutePath(), fnf);
-				if(r != null) {
+				if (r != null) {
 					return r;
 				}
 			} else if (fnf.accept(f.getParentFile(), f.getName())) {
@@ -235,6 +235,10 @@ public abstract class YarnTestBase extends TestLogger {
 		}
 	}
 
+	/**
+	 * A simple {@link FilenameFilter} that only accepts files if their name contains every string in the array passed
+	 * to the constructor.
+	 */
 	public static class ContainsName implements FilenameFilter {
 		private String[] names;
 		private String excludeInPath = null;
@@ -253,16 +257,16 @@ public abstract class YarnTestBase extends TestLogger {
 
 		@Override
 		public boolean accept(File dir, String name) {
-			if(excludeInPath == null) {
-				for(String n: names) {
-					if(!name.contains(n)) {
+			if (excludeInPath == null) {
+				for (String n: names) {
+					if (!name.contains(n)) {
 						return false;
 					}
 				}
 				return true;
 			} else {
-				for(String n: names) {
-					if(!name.contains(n)) {
+				for (String n: names) {
+					if (!name.contains(n)) {
 						return false;
 					}
 				}
@@ -286,27 +290,27 @@ public abstract class YarnTestBase extends TestLogger {
 	 * This method checks the written TaskManager and JobManager log files
 	 * for exceptions.
 	 *
-	 * WARN: Please make sure the tool doesn't find old logfiles from previous test runs.
+	 * <p>WARN: Please make sure the tool doesn't find old logfiles from previous test runs.
 	 * So always run "mvn clean" before running the tests here.
 	 *
 	 */
 	public static void ensureNoProhibitedStringInLogFiles(final String[] prohibited, final String[] whitelisted) {
-		File cwd = new File("target/" + yarnConfiguration.get(TEST_CLUSTER_NAME_KEY));
+		File cwd = new File("target/" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY));
 		Assert.assertTrue("Expecting directory " + cwd.getAbsolutePath() + " to exist", cwd.exists());
 		Assert.assertTrue("Expecting directory " + cwd.getAbsolutePath() + " to be a directory", cwd.isDirectory());
-		
+
 		File foundFile = findFile(cwd.getAbsolutePath(), new FilenameFilter() {
 			@Override
 			public boolean accept(File dir, String name) {
 			// scan each file for prohibited strings.
-			File f = new File(dir.getAbsolutePath()+ "/" + name);
+			File f = new File(dir.getAbsolutePath() + "/" + name);
 			try {
 				Scanner scanner = new Scanner(f);
 				while (scanner.hasNextLine()) {
 					final String lineFromFile = scanner.nextLine();
 					for (String aProhibited : prohibited) {
 						if (lineFromFile.contains(aProhibited)) {
-							
+
 							boolean whitelistedFound = false;
 							for (String white : whitelisted) {
 								if (lineFromFile.contains(white)) {
@@ -314,7 +318,7 @@ public abstract class YarnTestBase extends TestLogger {
 									break;
 								}
 							}
-							
+
 							if (!whitelistedFound) {
 								// logging in FATAL to see the actual message in TRAVIS tests.
 								Marker fatal = MarkerFactory.getMarker("FATAL");
@@ -326,24 +330,24 @@ public abstract class YarnTestBase extends TestLogger {
 
 				}
 			} catch (FileNotFoundException e) {
-				LOG.warn("Unable to locate file: "+e.getMessage()+" file: "+f.getAbsolutePath());
+				LOG.warn("Unable to locate file: " + e.getMessage() + " file: " + f.getAbsolutePath());
 			}
 
 			return false;
 			}
 		});
-		if(foundFile != null) {
+		if (foundFile != null) {
 			Scanner scanner =  null;
 			try {
 				scanner = new Scanner(foundFile);
 			} catch (FileNotFoundException e) {
-				Assert.fail("Unable to locate file: "+e.getMessage()+" file: "+foundFile.getAbsolutePath());
+				Assert.fail("Unable to locate file: " + e.getMessage() + " file: " + foundFile.getAbsolutePath());
 			}
 			LOG.warn("Found a file with a prohibited string. Printing contents:");
 			while (scanner.hasNextLine()) {
-				LOG.warn("LINE: "+scanner.nextLine());
+				LOG.warn("LINE: " + scanner.nextLine());
 			}
-			Assert.fail("Found a file "+foundFile+" with a prohibited string: "+Arrays.toString(prohibited));
+			Assert.fail("Found a file " + foundFile + " with a prohibited string: " + Arrays.toString(prohibited));
 		}
 	}
 
@@ -351,13 +355,13 @@ public abstract class YarnTestBase extends TestLogger {
 		try {
 			Thread.sleep(time);
 		} catch (InterruptedException e) {
-			LOG.warn("Interruped",e);
+			LOG.warn("Interrupted", e);
 		}
 	}
 
 	public static int getRunningContainers() {
 		int count = 0;
-		for(int nmId = 0; nmId < NUM_NODEMANAGERS; nmId++) {
+		for (int nmId = 0; nmId < NUM_NODEMANAGERS; nmId++) {
 			NodeManager nm = yarnCluster.getNodeManager(nmId);
 			ConcurrentMap<ContainerId, Container> containers = nm.getNMContext().getContainers();
 			count += containers.size();
@@ -370,7 +374,7 @@ public abstract class YarnTestBase extends TestLogger {
 	}
 
 	public static void startYARNWithConfig(Configuration conf) {
-		start(conf,null,null);
+		start(conf, null, null);
 	}
 
 	private static void start(Configuration conf, String principal, String keytab) {
@@ -411,17 +415,17 @@ public abstract class YarnTestBase extends TestLogger {
 			File flinkConfDirPath = findFile(flinkDistRootDir, new ContainsName(new String[]{"flink-conf.yaml"}));
 			Assert.assertNotNull(flinkConfDirPath);
 
-			if(!StringUtils.isBlank(principal) && !StringUtils.isBlank(keytab)) {
+			if (!StringUtils.isBlank(principal) && !StringUtils.isBlank(keytab)) {
 				//copy conf dir to test temporary workspace location
 				tempConfPathForSecureRun = tmp.newFolder("conf");
 
 				String confDirPath = flinkConfDirPath.getParentFile().getAbsolutePath();
 				FileUtils.copyDirectory(new File(confDirPath), tempConfPathForSecureRun);
 
-				try(FileWriter fw = new FileWriter(new File(tempConfPathForSecureRun,"flink-conf.yaml"), true);
+				try (FileWriter fw = new FileWriter(new File(tempConfPathForSecureRun, "flink-conf.yaml"), true);
 					BufferedWriter bw = new BufferedWriter(fw);
-					PrintWriter out = new PrintWriter(bw))
-				{
+					PrintWriter out = new PrintWriter(bw)) {
+
 					LOG.info("writing keytab: " + keytab + " and principal: " + principal + " to config file");
 					out.println("");
 					out.println("#Security Configurations Auto Populated ");
@@ -452,7 +456,7 @@ public abstract class YarnTestBase extends TestLogger {
 			Assert.assertTrue(yarnCluster.getServiceState() == Service.STATE.STARTED);
 
 			// wait for the nodeManagers to connect
-			while(!yarnCluster.waitForNodeManagersToConnect(500)) {
+			while (!yarnCluster.waitForNodeManagersToConnect(500)) {
 				LOG.info("Waiting for Nodemanagers to connect");
 			}
 		} catch (Exception ex) {
@@ -468,7 +472,7 @@ public abstract class YarnTestBase extends TestLogger {
 	 */
 	@BeforeClass
 	public static void setup() {
-		startYARNWithConfig(yarnConfiguration);
+		startYARNWithConfig(YARN_CONFIGURATION);
 	}
 
 	// -------------------------- Runner -------------------------- //
@@ -490,24 +494,24 @@ public abstract class YarnTestBase extends TestLogger {
 		System.setOut(new PrintStream(outContent));
 		System.setErr(new PrintStream(errContent));
 
-		final int START_TIMEOUT_SECONDS = 60;
+		final int startTimeoutSeconds = 60;
 
 		Runner runner = new Runner(args, type, 0);
 		runner.setName("Frontend (CLI/YARN Client) runner thread (startWithArgs()).");
 		runner.start();
 
-		for(int second = 0; second <  START_TIMEOUT_SECONDS; second++) {
+		for (int second = 0; second < startTimeoutSeconds; second++) {
 			sleep(1000);
 			// check output for correct TaskManager startup.
-			if(outContent.toString().contains(startedAfterString)
-					|| errContent.toString().contains(startedAfterString) ) {
+			if (outContent.toString().contains(startedAfterString)
+					|| errContent.toString().contains(startedAfterString)) {
 				LOG.info("Found expected output in redirected streams");
 				return runner;
 			}
 			// check if thread died
-			if(!runner.isAlive()) {
+			if (!runner.isAlive()) {
 				sendOutput();
-				if(runner.getRunnerError() != null) {
+				if (runner.getRunnerError() != null) {
 					throw new RuntimeException("Runner failed with exception.", runner.getRunnerError());
 				}
 				Assert.fail("Runner thread died before the test was finished.");
@@ -515,14 +519,15 @@ public abstract class YarnTestBase extends TestLogger {
 		}
 
 		sendOutput();
-		Assert.fail("During the timeout period of " + START_TIMEOUT_SECONDS + " seconds the " +
+		Assert.fail("During the timeout period of " + startTimeoutSeconds + " seconds the " +
 				"expected string did not show up");
 		return null;
 	}
 
 	protected void runWithArgs(String[] args, String terminateAfterString, String[] failOnStrings, RunTypes type, int returnCode) {
-		runWithArgs(args,terminateAfterString, failOnStrings, type, returnCode, false);
+		runWithArgs(args, terminateAfterString, failOnStrings, type, returnCode, false);
 	}
+
 	/**
 	 * The test has passed once the "terminateAfterString" has been seen.
 	 * @param args Command line arguments for the runner
@@ -540,11 +545,10 @@ public abstract class YarnTestBase extends TestLogger {
 		System.setOut(new PrintStream(outContent));
 		System.setErr(new PrintStream(errContent));
 
-
 		// we wait for at most three minutes
-		final int START_TIMEOUT_SECONDS = 180;
-		final long deadline = System.currentTimeMillis() + (START_TIMEOUT_SECONDS * 1000);
-		
+		final int startTimeoutSeconds = 180;
+		final long deadline = System.currentTimeMillis() + (startTimeoutSeconds * 1000);
+
 		Runner runner = new Runner(args, type, expectedReturnValue);
 		runner.start();
 
@@ -554,7 +558,7 @@ public abstract class YarnTestBase extends TestLogger {
 			sleep(1000);
 			String outContentString = outContent.toString();
 			String errContentString = errContent.toString();
-			if(failOnPatterns != null) {
+			if (failOnPatterns != null) {
 				for (String failOnString : failOnPatterns) {
 					Pattern pattern = Pattern.compile(failOnString);
 					if (pattern.matcher(outContentString).find() || pattern.matcher(errContentString).find()) {
@@ -567,16 +571,16 @@ public abstract class YarnTestBase extends TestLogger {
 				}
 			}
 			// check output for the expected terminateAfterString.
-			if(checkLogForTerminateString) {
+			if (checkLogForTerminateString) {
 				LoggingEvent matchedEvent = UtilsTest.getEventContainingString(terminateAfterString);
-				if(matchedEvent != null) {
+				if (matchedEvent != null) {
 					testPassedFromLog4j = true;
 					LOG.info("Found expected output in logging event {}", matchedEvent);
 				}
 
 			}
 
-			if (outContentString.contains(terminateAfterString) || errContentString.contains(terminateAfterString) || testPassedFromLog4j ) {
+			if (outContentString.contains(terminateAfterString) || errContentString.contains(terminateAfterString) || testPassedFromLog4j) {
 				expectedStringSeen = true;
 				LOG.info("Found expected output in redirected streams");
 				// send "stop" command to command line interface
@@ -600,28 +604,31 @@ public abstract class YarnTestBase extends TestLogger {
 			}
 		}
 		while (runner.getRunnerError() == null && !expectedStringSeen && System.currentTimeMillis() < deadline);
-		
+
 		sendOutput();
 
-		if(runner.getRunnerError() != null) {
+		if (runner.getRunnerError() != null) {
 			// this lets the test fail.
 			throw new RuntimeException("Runner failed", runner.getRunnerError());
 		}
-		Assert.assertTrue("During the timeout period of " + START_TIMEOUT_SECONDS + " seconds the " +
+		Assert.assertTrue("During the timeout period of " + startTimeoutSeconds + " seconds the " +
 				"expected string did not show up", expectedStringSeen);
 
 		LOG.info("Test was successful");
 	}
 
 	protected static void sendOutput() {
-		System.setOut(originalStdout);
-		System.setErr(originalStderr);
+		System.setOut(ORIGINAL_STDOUT);
+		System.setErr(ORIGINAL_STDERR);
 
 		LOG.info("Sending stdout content through logger: \n\n{}\n\n", outContent.toString());
 		LOG.info("Sending stderr content through logger: \n\n{}\n\n", errContent.toString());
 	}
 
-	public static class Runner extends Thread {
+	/**
+	 * Utility class to run yarn jobs.
+	 */
+	protected static class Runner extends Thread {
 		private final String[] args;
 		private final int expectedReturnValue;
 		private RunTypes type;
@@ -634,7 +641,6 @@ public abstract class YarnTestBase extends TestLogger {
 			this.expectedReturnValue = expectedReturnValue;
 		}
 
-
 		@Override
 		public void run() {
 			try {
@@ -682,9 +688,9 @@ public abstract class YarnTestBase extends TestLogger {
 			}
 		}
 
-		/** Stops the Yarn session */
+		/** Stops the Yarn session. */
 		public void sendStop() {
-			if(yCli != null) {
+			if (yCli != null) {
 				yCli.stop();
 			}
 		}
@@ -709,7 +715,7 @@ public abstract class YarnTestBase extends TestLogger {
 		map.remove("IN_TESTS");
 		TestBaseUtils.setEnv(map);
 
-		if(tempConfPathForSecureRun != null) {
+		if (tempConfPathForSecureRun != null) {
 			FileUtil.fullyDelete(tempConfPathForSecureRun);
 			tempConfPathForSecureRun = null;
 		}
@@ -718,9 +724,9 @@ public abstract class YarnTestBase extends TestLogger {
 		// to <flinkRoot>/target/flink-yarn-tests-*.
 		// The files from there are picked up by the ./tools/travis_watchdog.sh script
 		// to upload them to Amazon S3.
-		if(isOnTravis()) {
-			File target = new File("../target" + yarnConfiguration.get(TEST_CLUSTER_NAME_KEY));
-			if(!target.mkdirs()) {
+		if (isOnTravis()) {
+			File target = new File("../target" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY));
+			if (!target.mkdirs()) {
 				LOG.warn("Error creating dirs to {}", target);
 			}
 			File src = tmp.getRoot();
@@ -762,6 +768,5 @@ public abstract class YarnTestBase extends TestLogger {
 			return spiedClusterClient;
 		}
 
-
 	}
 }

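The newly documented ContainsName filter is small enough to demonstrate on
its own. The sketch below re-implements its accept logic with a lambda rather
than quoting the class (illustrative only; the real filter additionally
supports the excludeInPath variant visible in the hunk above):

    import java.io.File;
    import java.io.FilenameFilter;

    public class ContainsNameSketch {

        public static void main(String[] args) {
            // Accept a file only if its name contains every required substring.
            final String[] names = {"flink", "conf"};
            FilenameFilter filter = (dir, name) -> {
                for (String n : names) {
                    if (!name.contains(n)) {
                        return false;
                    }
                }
                return true;
            };

            File dir = new File(".");
            System.out.println(filter.accept(dir, "flink-conf.yaml"));  // true
            System.out.println(filter.accept(dir, "log4j.properties")); // false
        }
    }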
http://git-wip-us.apache.org/repos/asf/flink/blob/0e69dd5c/flink-yarn-tests/src/test/scala/org/apache/flink/yarn/TestingYarnTaskManager.scala
----------------------------------------------------------------------
diff --git a/flink-yarn-tests/src/test/scala/org/apache/flink/yarn/TestingYarnTaskManager.scala b/flink-yarn-tests/src/test/scala/org/apache/flink/yarn/TestingYarnTaskManager.scala
index 1df4b8d..a03f365 100644
--- a/flink-yarn-tests/src/test/scala/org/apache/flink/yarn/TestingYarnTaskManager.scala
+++ b/flink-yarn-tests/src/test/scala/org/apache/flink/yarn/TestingYarnTaskManager.scala
@@ -22,7 +22,6 @@ import org.apache.flink.runtime.clusterframework.types.ResourceID
 import org.apache.flink.runtime.highavailability.HighAvailabilityServices
 import org.apache.flink.runtime.io.disk.iomanager.IOManager
 import org.apache.flink.runtime.io.network.NetworkEnvironment
-import org.apache.flink.runtime.leaderretrieval.LeaderRetrievalService
 import org.apache.flink.runtime.memory.MemoryManager
 import org.apache.flink.runtime.metrics.MetricRegistry
 import org.apache.flink.runtime.taskexecutor.TaskManagerConfiguration
@@ -79,4 +78,3 @@ class TestingYarnTaskManager(
   }
 }
 
-


[5/7] flink git commit: [hotfix] Rename exampleJavaPrograms package

Posted by ch...@apache.org.
[hotfix] Rename exampleJavaPrograms package

This closes #3986.


Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/1cd0ee74
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/1cd0ee74
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/1cd0ee74

Branch: refs/heads/master
Commit: 1cd0ee74af3e9b27d5c3a257cbd9ddbb12c9df89
Parents: 789ed8a
Author: Greg Hogan <co...@greghogan.com>
Authored: Wed May 24 15:31:45 2017 -0400
Committer: zentol <ch...@apache.org>
Committed: Thu May 25 09:48:54 2017 +0200

----------------------------------------------------------------------
 .../iteration/IterateExampleITCase.java         | 51 ------------
 .../join/WindowJoinData.java                    | 64 ---------------
 .../join/WindowJoinITCase.java                  | 84 --------------------
 .../ml/IncrementalLearningSkeletonITCase.java   | 45 -----------
 .../twitter/TwitterStreamITCase.java            | 45 -----------
 .../windowing/SessionWindowingITCase.java       | 45 -----------
 .../TopSpeedWindowingExampleITCase.java         | 50 ------------
 .../windowing/WindowWordCountITCase.java        | 57 -------------
 .../wordcount/PojoExampleITCase.java            | 50 ------------
 .../wordcount/WordCountITCase.java              | 50 ------------
 .../TopSpeedWindowingExampleITCase.java         | 50 ------------
 .../iteration/IterateExampleITCase.java         | 51 ++++++++++++
 .../test/examples/join/WindowJoinData.java      | 64 +++++++++++++++
 .../test/examples/join/WindowJoinITCase.java    | 84 ++++++++++++++++++++
 .../ml/IncrementalLearningSkeletonITCase.java   | 45 +++++++++++
 .../examples/twitter/TwitterStreamITCase.java   | 45 +++++++++++
 .../windowing/SessionWindowingITCase.java       | 45 +++++++++++
 .../TopSpeedWindowingExampleITCase.java         | 50 ++++++++++++
 .../windowing/WindowWordCountITCase.java        | 57 +++++++++++++
 .../examples/wordcount/PojoExampleITCase.java   | 50 ++++++++++++
 .../examples/wordcount/WordCountITCase.java     | 50 ++++++++++++
 .../TopSpeedWindowingExampleITCase.java         | 50 ++++++++++++
 .../scala/examples/WindowJoinITCase.scala       |  2 +-
 23 files changed, 592 insertions(+), 592 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/iteration/IterateExampleITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/iteration/IterateExampleITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/iteration/IterateExampleITCase.java
deleted file mode 100644
index 2f9af69..0000000
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/iteration/IterateExampleITCase.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.streaming.test.exampleJavaPrograms.iteration;
-
-import org.apache.flink.streaming.examples.iteration.IterateExample;
-import org.apache.flink.streaming.examples.iteration.util.IterateExampleData;
-import org.apache.flink.streaming.util.StreamingProgramTestBase;
-
-/**
- * Tests for {@link IterateExample}.
- */
-public class IterateExampleITCase extends StreamingProgramTestBase {
-
-	protected String inputPath;
-	protected String resultPath;
-
-	@Override
-	protected void preSubmit() throws Exception {
-		inputPath = createTempFile("fibonacciInput.txt", IterateExampleData.INPUT_PAIRS);
-		resultPath = getTempDirPath("result");
-	}
-
-	@Override
-	protected void postSubmit() throws Exception {
-		// the example is inherently non-deterministic. The iteration timeout of 5000 ms
-		// is frequently not enough to make the test run stable on CI infrastructure
-		// with very small containers, so we cannot do a validation here
-	}
-
-	@Override
-	protected void testProgram() throws Exception {
-		IterateExample.main(new String[]{
-				"--input", inputPath,
-				"--output", resultPath});
-	}
-}

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/join/WindowJoinData.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/join/WindowJoinData.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/join/WindowJoinData.java
deleted file mode 100644
index 6b4738a..0000000
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/join/WindowJoinData.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.streaming.test.exampleJavaPrograms.join;
-
-/**
- * Class with sample data for window join examples.
- */
-public class WindowJoinData {
-
-	public static final String GRADES_INPUT = "0,john,5\n" + "0,tom,3\n" + "0,alice,1\n" + "0,grace,5\n" +
-			"1,john,4\n" + "1,bob,1\n" + "1,alice,2\n" + "1,alice,3\n" + "1,bob,5\n" + "1,alice,3\n" + "1,tom,5\n" +
-			"2,john,2\n" + "2,john,1\n" + "2,grace,2\n" + "2,jerry,2\n" + "2,tom,4\n" + "2,bob,4\n" + "2,bob,2\n" +
-			"3, tom,2\n" + "3,alice,5\n" + "3,grace,5\n" + "3,grace,1\n" + "3,alice,1\n" + "3,grace,3\n" + "3,tom,1\n" +
-			"4,jerry,5\n" + "4,john,3\n" + "4,john,4\n" + "4,john,1\n" + "4,jerry,3\n" + "4,grace,3\n" + "4,bob,3\n" +
-			"5,john,3\n" + "5,jerry,4\n" + "5,tom,5\n" + "5,tom,4\n" + "5,john,2\n" + "5,jerry,1\n" + "5,bob,1\n" +
-			"6,john,5\n" + "6,grace,4\n" + "6,tom,5\n" + "6,john,4\n" + "6,tom,1\n" + "6,grace,1\n" + "6,john,2\n" +
-			"7,jerry,3\n" + "7,jerry,5\n" + "7,tom,2\n" + "7,tom,2\n" + "7,alice,4\n" + "7,tom,4\n" + "7,jerry,4\n" +
-			"8,john,3\n" + "8,grace,4\n" + "8,tom,3\n" + "8,jerry,4\n" + "8,john,5\n" + "8,john,4\n" + "8,jerry,1\n" +
-			"9,john,5\n" + "9,alice,2\n" + "9,tom,1\n" + "9,alice,5\n" + "9,grace,4\n" + "9,bob,4\n" + "9,jerry,1\n" +
-			"10,john,5\n" + "10,tom,4\n" + "10,tom,5\n" + "10,jerry,5\n" + "10,tom,1\n" + "10,grace,3\n" + "10,bob,5\n" +
-			"11,john,1\n" + "11,alice,1\n" + "11,grace,3\n" + "11,grace,1\n" + "11,jerry,1\n" + "11,jerry,4\n" +
-			"12,bob,4\n" + "12,alice,3\n" + "12,tom,5\n" + "12,alice,4\n" + "12,alice,4\n" + "12,grace,4\n" + "12,john,5\n" +
-			"13,john,5\n" + "13,grace,4\n" + "13,tom,4\n" + "13,john,4\n" + "13,john,5\n" + "13,alice,5\n" + "13,jerry,5\n" +
-			"14,john,3\n" + "14,tom,5\n" + "14,jerry,4\n" + "14,grace,4\n" + "14,john,3\n" + "14,bob,2";
-
-	public static final String SALARIES_INPUT = "0,john,6469\n" + "0,jerry,6760\n" + "0,jerry,8069\n" +
-			"1,tom,3662\n" + "1,grace,8427\n" + "1,john,9425\n" + "1,bob,9018\n" + "1,john,352\n" + "1,tom,3770\n" +
-			"2,grace,7622\n" + "2,jerry,7441\n" + "2,alice,1468\n" + "2,bob,5472\n" + "2,grace,898\n" +
-			"3,tom,3849\n" + "3,grace,1865\n" + "3,alice,5582\n" + "3,john,9511\n" + "3,alice,1541\n" +
-			"4,john,2477\n" + "4,grace,3561\n" + "4,john,1670\n" + "4,grace,7290\n" + "4,grace,6565\n" +
-			"5,tom,6179\n" + "5,tom,1601\n" + "5,john,2940\n" + "5,bob,4685\n" + "5,bob,710\n" + "5,bob,5936\n" +
-			"6,jerry,1412\n" + "6,grace,6515\n" + "6,grace,3321\n" + "6,tom,8088\n" + "6,john,2876\n" +
-			"7,bob,9896\n" + "7,grace,7368\n" + "7,grace,9749\n" + "7,bob,2048\n" + "7,alice,4782\n" +
-			"8,alice,3375\n" + "8,tom,5841\n" + "8,bob,958\n" + "8,bob,5258\n" + "8,tom,3935\n" + "8,jerry,4394\n" +
-			"9,alice,102\n" + "9,alice,4931\n" + "9,alice,5240\n" + "9,jerry,7951\n" + "9,john,5675\n" +
-			"10,bob,609\n" + "10,alice,5997\n" + "10,jerry,9651\n" + "10,alice,1328\n" + "10,bob,1022\n" +
-			"11,grace,2578\n" + "11,jerry,9704\n" + "11,tom,4476\n" + "11,grace,3784\n" + "11,alice,6144\n" +
-			"12,bob,6213\n" + "12,alice,7525\n" + "12,jerry,2908\n" + "12,grace,8464\n" + "12,jerry,9920\n" +
-			"13,bob,3720\n" + "13,bob,7612\n" + "13,alice,7211\n" + "13,jerry,6484\n" + "13,alice,1711\n" +
-			"14,jerry,5994\n" + "14,grace,928\n" + "14,jerry,2492\n" + "14,grace,9080\n" + "14,tom,4330\n" +
-			"15,bob,8302\n" + "15,john,4981\n" + "15,tom,1781\n" + "15,grace,1379\n" + "15,jerry,3700\n" +
-			"16,jerry,3584\n" + "16,jerry,2038\n" + "16,jerry,3902\n" + "16,tom,1336\n" + "16,jerry,7500\n" +
-			"17,tom,3648\n" + "17,alice,2533\n" + "17,tom,8685\n" + "17,bob,3968\n" + "17,tom,3241\n" + "17,bob,7461\n" +
-			"18,jerry,2138\n" + "18,alice,7503\n" + "18,alice,6424\n" + "18,tom,140\n" + "18,john,9802\n" +
-			"19,grace,2977\n" + "19,grace,889\n" + "19,john,1338";
-
-	/** Utility class, should not be instantiated. */
-	private WindowJoinData() {}
-}

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/join/WindowJoinITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/join/WindowJoinITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/join/WindowJoinITCase.java
deleted file mode 100644
index 264ce55..0000000
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/join/WindowJoinITCase.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.streaming.test.exampleJavaPrograms.join;
-
-import org.apache.flink.api.common.functions.MapFunction;
-import org.apache.flink.api.java.tuple.Tuple2;
-import org.apache.flink.core.fs.FileSystem.WriteMode;
-import org.apache.flink.streaming.api.TimeCharacteristic;
-import org.apache.flink.streaming.api.datastream.DataStream;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-import org.apache.flink.streaming.examples.join.WindowJoin;
-import org.apache.flink.streaming.util.StreamingMultipleProgramsTestBase;
-
-import org.apache.commons.io.FileUtils;
-import org.junit.Test;
-
-import java.io.File;
-
-/**
- * Tests for {@link WindowJoin}.
- */
-@SuppressWarnings("serial")
-public class WindowJoinITCase extends StreamingMultipleProgramsTestBase {
-
-	@Test
-	public void testProgram() throws Exception {
-		final String resultPath = File.createTempFile("result-path", "dir").toURI().toString();
-		try {
-			final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
-			env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);
-
-			DataStream<Tuple2<String, Integer>> grades = env
-					.fromElements(WindowJoinData.GRADES_INPUT.split("\n"))
-					.map(new Parser());
-
-			DataStream<Tuple2<String, Integer>> salaries = env
-					.fromElements(WindowJoinData.SALARIES_INPUT.split("\n"))
-					.map(new Parser());
-
-			WindowJoin
-					.runWindowJoin(grades, salaries, 100)
-					.writeAsText(resultPath, WriteMode.OVERWRITE);
-
-			env.execute();
-
-			// since the two sides of the join might have different speed
-			// the exact output can not be checked just whether it is well-formed
-			// checks that the result lines look like e.g. (bob, 2, 2015)
-			checkLinesAgainstRegexp(resultPath, "^\\([a-z]+,(\\d),(\\d)+\\)");
-		}
-		finally {
-			try {
-				FileUtils.deleteDirectory(new File(resultPath));
-			} catch (Throwable ignored) {}
-		}
-	}
-
-	//-------------------------------------------------------------------------
-
-	private static final class Parser implements MapFunction<String, Tuple2<String, Integer>> {
-
-		@Override
-		public Tuple2<String, Integer> map(String value) throws Exception {
-			String[] fields = value.split(",");
-			return new Tuple2<>(fields[1], Integer.parseInt(fields[2]));
-		}
-	}
-}

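The moved WindowJoinITCase deliberately avoids comparing exact output: the
two join inputs progress at different speeds, so the test only checks that
each result line is well-formed. A minimal demonstration of the same regular
expression used above (the class name is invented for illustration):

    import java.util.regex.Pattern;

    public class JoinOutputRegexSketch {

        public static void main(String[] args) {
            // Same pattern the test hands to checkLinesAgainstRegexp:
            // a line like (bob,2,2015), i.e. name, grade, salary.
            Pattern p = Pattern.compile("^\\([a-z]+,(\\d),(\\d)+\\)");
            System.out.println(p.matcher("(bob,2,2015)").find()); // true
            System.out.println(p.matcher("bob,2,2015").find());   // false
        }
    }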
http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/ml/IncrementalLearningSkeletonITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/ml/IncrementalLearningSkeletonITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/ml/IncrementalLearningSkeletonITCase.java
deleted file mode 100644
index 90f6845..0000000
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/ml/IncrementalLearningSkeletonITCase.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.streaming.test.exampleJavaPrograms.ml;
-
-import org.apache.flink.streaming.examples.ml.IncrementalLearningSkeleton;
-import org.apache.flink.streaming.examples.ml.util.IncrementalLearningSkeletonData;
-import org.apache.flink.streaming.util.StreamingProgramTestBase;
-
-/**
- * Tests for {@link IncrementalLearningSkeleton}.
- */
-public class IncrementalLearningSkeletonITCase extends StreamingProgramTestBase {
-
-	protected String resultPath;
-
-	@Override
-	protected void preSubmit() throws Exception {
-		resultPath = getTempDirPath("result");
-	}
-
-	@Override
-	protected void postSubmit() throws Exception {
-		compareResultsByLinesInMemory(IncrementalLearningSkeletonData.RESULTS, resultPath);
-	}
-
-	@Override
-	protected void testProgram() throws Exception {
-		IncrementalLearningSkeleton.main(new String[]{"--output", resultPath});
-	}
-}

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/twitter/TwitterStreamITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/twitter/TwitterStreamITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/twitter/TwitterStreamITCase.java
deleted file mode 100644
index 2be83c6..0000000
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/twitter/TwitterStreamITCase.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.streaming.test.exampleJavaPrograms.twitter;
-
-import org.apache.flink.streaming.examples.twitter.TwitterExample;
-import org.apache.flink.streaming.examples.twitter.util.TwitterExampleData;
-import org.apache.flink.streaming.util.StreamingProgramTestBase;
-
-/**
- * Tests for {@link TwitterExample}.
- */
-public class TwitterStreamITCase extends StreamingProgramTestBase {
-	protected String resultPath;
-
-	@Override
-	protected void preSubmit() throws Exception {
-		resultPath = getTempDirPath("result");
-	}
-
-	@Override
-	protected void postSubmit() throws Exception {
-		compareResultsByLinesInMemory(TwitterExampleData.STREAMING_COUNTS_AS_TUPLES, resultPath);
-	}
-
-	@Override
-	protected void testProgram() throws Exception {
-		TwitterExample.main(new String[]{"--output", resultPath});
-	}
-
-}

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/SessionWindowingITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/SessionWindowingITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/SessionWindowingITCase.java
deleted file mode 100644
index a3c3175..0000000
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/SessionWindowingITCase.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.streaming.test.exampleJavaPrograms.windowing;
-
-import org.apache.flink.streaming.examples.windowing.SessionWindowing;
-import org.apache.flink.streaming.examples.windowing.util.SessionWindowingData;
-import org.apache.flink.streaming.util.StreamingProgramTestBase;
-
-/**
- * Tests for {@link SessionWindowing}.
- */
-public class SessionWindowingITCase extends StreamingProgramTestBase {
-
-	protected String resultPath;
-
-	@Override
-	protected void preSubmit() throws Exception {
-		resultPath = getTempDirPath("result");
-	}
-
-	@Override
-	protected void postSubmit() throws Exception {
-		compareResultsByLinesInMemory(SessionWindowingData.EXPECTED, resultPath);
-	}
-
-	@Override
-	protected void testProgram() throws Exception {
-		SessionWindowing.main(new String[]{"--output", resultPath});
-	}
-}

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/TopSpeedWindowingExampleITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/TopSpeedWindowingExampleITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/TopSpeedWindowingExampleITCase.java
deleted file mode 100644
index e9f9f51..0000000
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/TopSpeedWindowingExampleITCase.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.streaming.test.exampleJavaPrograms.windowing;
-
-import org.apache.flink.streaming.examples.windowing.TopSpeedWindowing;
-import org.apache.flink.streaming.examples.windowing.util.TopSpeedWindowingExampleData;
-import org.apache.flink.streaming.util.StreamingProgramTestBase;
-
-/**
- * Tests for {@link TopSpeedWindowing}.
- */
-public class TopSpeedWindowingExampleITCase extends StreamingProgramTestBase {
-
-	protected String textPath;
-	protected String resultPath;
-
-	@Override
-	protected void preSubmit() throws Exception {
-		setParallelism(1); //needed to ensure total ordering for windows
-		textPath = createTempFile("text.txt", TopSpeedWindowingExampleData.CAR_DATA);
-		resultPath = getTempDirPath("result");
-	}
-
-	@Override
-	protected void postSubmit() throws Exception {
-		compareResultsByLinesInMemory(TopSpeedWindowingExampleData.TOP_SPEEDS, resultPath);
-	}
-
-	@Override
-	protected void testProgram() throws Exception {
-		TopSpeedWindowing.main(new String[]{
-				"--input", textPath,
-				"--output", resultPath});
-	}
-}

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/WindowWordCountITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/WindowWordCountITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/WindowWordCountITCase.java
deleted file mode 100644
index ad36582..0000000
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/windowing/WindowWordCountITCase.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.streaming.test.exampleJavaPrograms.windowing;
-
-import org.apache.flink.streaming.examples.windowing.WindowWordCount;
-import org.apache.flink.streaming.util.StreamingProgramTestBase;
-import org.apache.flink.test.testdata.WordCountData;
-
-/**
- * Tests for {@link WindowWordCount}.
- */
-public class WindowWordCountITCase extends StreamingProgramTestBase {
-
-	protected String textPath;
-	protected String resultPath;
-	protected String windowSize = "250";
-	protected String slideSize = "150";
-
-	@Override
-	protected void preSubmit() throws Exception {
-		textPath = createTempFile("text.txt", WordCountData.TEXT);
-		resultPath = getTempDirPath("result");
-	}
-
-	@Override
-	protected void postSubmit() throws Exception {
-		// since the parallel tokenizers might have different speed
-		// the exact output can not be checked just whether it is well-formed
-		// checks that the result lines look like e.g. (faust, 2)
-		checkLinesAgainstRegexp(resultPath, "^\\([a-z]+,(\\d)+\\)");
-	}
-
-	@Override
-	protected void testProgram() throws Exception {
-		WindowWordCount.main(new String[]{
-				"--input", textPath,
-				"--output", resultPath,
-				"--window", windowSize,
-				"--slide", slideSize});
-	}
-}

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/wordcount/PojoExampleITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/wordcount/PojoExampleITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/wordcount/PojoExampleITCase.java
deleted file mode 100644
index 609b69d..0000000
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/wordcount/PojoExampleITCase.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.streaming.test.exampleJavaPrograms.wordcount;
-
-import org.apache.flink.streaming.examples.wordcount.PojoExample;
-import org.apache.flink.streaming.util.StreamingProgramTestBase;
-import org.apache.flink.test.testdata.WordCountData;
-
-/**
- * Tests for {@link PojoExample}.
- */
-public class PojoExampleITCase extends StreamingProgramTestBase {
-
-	protected String textPath;
-	protected String resultPath;
-
-	@Override
-	protected void preSubmit() throws Exception {
-		textPath = createTempFile("text.txt", WordCountData.TEXT);
-		resultPath = getTempDirPath("result");
-	}
-
-	@Override
-	protected void postSubmit() throws Exception {
-		compareResultsByLinesInMemory(WordCountData.STREAMING_COUNTS_AS_TUPLES, resultPath);
-	}
-
-	@Override
-	protected void testProgram() throws Exception {
-		PojoExample.main(new String[]{
-				"--input", textPath,
-				"--output", resultPath});
-	}
-}

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/wordcount/WordCountITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/wordcount/WordCountITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/wordcount/WordCountITCase.java
deleted file mode 100644
index ef57794..0000000
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleJavaPrograms/wordcount/WordCountITCase.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.streaming.test.exampleJavaPrograms.wordcount;
-
-import org.apache.flink.streaming.examples.wordcount.WordCount;
-import org.apache.flink.streaming.util.StreamingProgramTestBase;
-import org.apache.flink.test.testdata.WordCountData;
-
-/**
- * Tests for {@link WordCount}.
- */
-public class WordCountITCase extends StreamingProgramTestBase {
-
-	protected String textPath;
-	protected String resultPath;
-
-	@Override
-	protected void preSubmit() throws Exception {
-		textPath = createTempFile("text.txt", WordCountData.TEXT);
-		resultPath = getTempDirPath("result");
-	}
-
-	@Override
-	protected void postSubmit() throws Exception {
-		compareResultsByLinesInMemory(WordCountData.STREAMING_COUNTS_AS_TUPLES, resultPath);
-	}
-
-	@Override
-	protected void testProgram() throws Exception {
-		WordCount.main(new String[]{
-				"--input", textPath,
-				"--output", resultPath});
-	}
-}

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleScalaPrograms/windowing/TopSpeedWindowingExampleITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleScalaPrograms/windowing/TopSpeedWindowingExampleITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleScalaPrograms/windowing/TopSpeedWindowingExampleITCase.java
deleted file mode 100644
index c174429..0000000
--- a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/exampleScalaPrograms/windowing/TopSpeedWindowingExampleITCase.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.streaming.test.exampleScalaPrograms.windowing;
-
-import org.apache.flink.streaming.examples.windowing.util.TopSpeedWindowingExampleData;
-import org.apache.flink.streaming.scala.examples.windowing.TopSpeedWindowing;
-import org.apache.flink.streaming.util.StreamingProgramTestBase;
-
-/**
- * Tests for {@link TopSpeedWindowing}.
- */
-public class TopSpeedWindowingExampleITCase extends StreamingProgramTestBase {
-	protected String textPath;
-	protected String resultPath;
-
-	@Override
-	protected void preSubmit() throws Exception {
-		setParallelism(1); //needed to ensure total ordering for windows
-		textPath = createTempFile("text.txt", TopSpeedWindowingExampleData.CAR_DATA);
-		resultPath = getTempDirPath("result");
-	}
-
-	@Override
-	protected void postSubmit() throws Exception {
-		compareResultsByLinesInMemory(TopSpeedWindowingExampleData.TOP_CASE_CLASS_SPEEDS, resultPath);
-	}
-
-	@Override
-	protected void testProgram() throws Exception {
-		TopSpeedWindowing.main(new String[]{
-				"--input", textPath,
-				"--output", resultPath});
-
-	}
-}

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/iteration/IterateExampleITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/iteration/IterateExampleITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/iteration/IterateExampleITCase.java
new file mode 100644
index 0000000..e6ff1d4
--- /dev/null
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/iteration/IterateExampleITCase.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.streaming.test.examples.iteration;
+
+import org.apache.flink.streaming.examples.iteration.IterateExample;
+import org.apache.flink.streaming.examples.iteration.util.IterateExampleData;
+import org.apache.flink.streaming.util.StreamingProgramTestBase;
+
+/**
+ * Tests for {@link IterateExample}.
+ */
+public class IterateExampleITCase extends StreamingProgramTestBase {
+
+	protected String inputPath;
+	protected String resultPath;
+
+	@Override
+	protected void preSubmit() throws Exception {
+		inputPath = createTempFile("fibonacciInput.txt", IterateExampleData.INPUT_PAIRS);
+		resultPath = getTempDirPath("result");
+	}
+
+	@Override
+	protected void postSubmit() throws Exception {
+		// the example is inherently non-deterministic. The iteration timeout of 5000 ms
+		// is frequently not enough to make the test run stably on CI infrastructure
+		// with very small containers, so the result cannot be validated here
+	}
+
+	@Override
+	protected void testProgram() throws Exception {
+		IterateExample.main(new String[]{
+				"--input", inputPath,
+				"--output", resultPath});
+	}
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/join/WindowJoinData.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/join/WindowJoinData.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/join/WindowJoinData.java
new file mode 100644
index 0000000..16e78f9
--- /dev/null
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/join/WindowJoinData.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.streaming.test.examples.join;
+
+/**
+ * Class with sample data for window join examples.
+ */
+public class WindowJoinData {
+
+	public static final String GRADES_INPUT = "0,john,5\n" + "0,tom,3\n" + "0,alice,1\n" + "0,grace,5\n" +
+			"1,john,4\n" + "1,bob,1\n" + "1,alice,2\n" + "1,alice,3\n" + "1,bob,5\n" + "1,alice,3\n" + "1,tom,5\n" +
+			"2,john,2\n" + "2,john,1\n" + "2,grace,2\n" + "2,jerry,2\n" + "2,tom,4\n" + "2,bob,4\n" + "2,bob,2\n" +
+			"3, tom,2\n" + "3,alice,5\n" + "3,grace,5\n" + "3,grace,1\n" + "3,alice,1\n" + "3,grace,3\n" + "3,tom,1\n" +
+			"4,jerry,5\n" + "4,john,3\n" + "4,john,4\n" + "4,john,1\n" + "4,jerry,3\n" + "4,grace,3\n" + "4,bob,3\n" +
+			"5,john,3\n" + "5,jerry,4\n" + "5,tom,5\n" + "5,tom,4\n" + "5,john,2\n" + "5,jerry,1\n" + "5,bob,1\n" +
+			"6,john,5\n" + "6,grace,4\n" + "6,tom,5\n" + "6,john,4\n" + "6,tom,1\n" + "6,grace,1\n" + "6,john,2\n" +
+			"7,jerry,3\n" + "7,jerry,5\n" + "7,tom,2\n" + "7,tom,2\n" + "7,alice,4\n" + "7,tom,4\n" + "7,jerry,4\n" +
+			"8,john,3\n" + "8,grace,4\n" + "8,tom,3\n" + "8,jerry,4\n" + "8,john,5\n" + "8,john,4\n" + "8,jerry,1\n" +
+			"9,john,5\n" + "9,alice,2\n" + "9,tom,1\n" + "9,alice,5\n" + "9,grace,4\n" + "9,bob,4\n" + "9,jerry,1\n" +
+			"10,john,5\n" + "10,tom,4\n" + "10,tom,5\n" + "10,jerry,5\n" + "10,tom,1\n" + "10,grace,3\n" + "10,bob,5\n" +
+			"11,john,1\n" + "11,alice,1\n" + "11,grace,3\n" + "11,grace,1\n" + "11,jerry,1\n" + "11,jerry,4\n" +
+			"12,bob,4\n" + "12,alice,3\n" + "12,tom,5\n" + "12,alice,4\n" + "12,alice,4\n" + "12,grace,4\n" + "12,john,5\n" +
+			"13,john,5\n" + "13,grace,4\n" + "13,tom,4\n" + "13,john,4\n" + "13,john,5\n" + "13,alice,5\n" + "13,jerry,5\n" +
+			"14,john,3\n" + "14,tom,5\n" + "14,jerry,4\n" + "14,grace,4\n" + "14,john,3\n" + "14,bob,2";
+
+	public static final String SALARIES_INPUT = "0,john,6469\n" + "0,jerry,6760\n" + "0,jerry,8069\n" +
+			"1,tom,3662\n" + "1,grace,8427\n" + "1,john,9425\n" + "1,bob,9018\n" + "1,john,352\n" + "1,tom,3770\n" +
+			"2,grace,7622\n" + "2,jerry,7441\n" + "2,alice,1468\n" + "2,bob,5472\n" + "2,grace,898\n" +
+			"3,tom,3849\n" + "3,grace,1865\n" + "3,alice,5582\n" + "3,john,9511\n" + "3,alice,1541\n" +
+			"4,john,2477\n" + "4,grace,3561\n" + "4,john,1670\n" + "4,grace,7290\n" + "4,grace,6565\n" +
+			"5,tom,6179\n" + "5,tom,1601\n" + "5,john,2940\n" + "5,bob,4685\n" + "5,bob,710\n" + "5,bob,5936\n" +
+			"6,jerry,1412\n" + "6,grace,6515\n" + "6,grace,3321\n" + "6,tom,8088\n" + "6,john,2876\n" +
+			"7,bob,9896\n" + "7,grace,7368\n" + "7,grace,9749\n" + "7,bob,2048\n" + "7,alice,4782\n" +
+			"8,alice,3375\n" + "8,tom,5841\n" + "8,bob,958\n" + "8,bob,5258\n" + "8,tom,3935\n" + "8,jerry,4394\n" +
+			"9,alice,102\n" + "9,alice,4931\n" + "9,alice,5240\n" + "9,jerry,7951\n" + "9,john,5675\n" +
+			"10,bob,609\n" + "10,alice,5997\n" + "10,jerry,9651\n" + "10,alice,1328\n" + "10,bob,1022\n" +
+			"11,grace,2578\n" + "11,jerry,9704\n" + "11,tom,4476\n" + "11,grace,3784\n" + "11,alice,6144\n" +
+			"12,bob,6213\n" + "12,alice,7525\n" + "12,jerry,2908\n" + "12,grace,8464\n" + "12,jerry,9920\n" +
+			"13,bob,3720\n" + "13,bob,7612\n" + "13,alice,7211\n" + "13,jerry,6484\n" + "13,alice,1711\n" +
+			"14,jerry,5994\n" + "14,grace,928\n" + "14,jerry,2492\n" + "14,grace,9080\n" + "14,tom,4330\n" +
+			"15,bob,8302\n" + "15,john,4981\n" + "15,tom,1781\n" + "15,grace,1379\n" + "15,jerry,3700\n" +
+			"16,jerry,3584\n" + "16,jerry,2038\n" + "16,jerry,3902\n" + "16,tom,1336\n" + "16,jerry,7500\n" +
+			"17,tom,3648\n" + "17,alice,2533\n" + "17,tom,8685\n" + "17,bob,3968\n" + "17,tom,3241\n" + "17,bob,7461\n" +
+			"18,jerry,2138\n" + "18,alice,7503\n" + "18,alice,6424\n" + "18,tom,140\n" + "18,john,9802\n" +
+			"19,grace,2977\n" + "19,grace,889\n" + "19,john,1338";
+
+	/** Utility class, should not be instantiated. */
+	private WindowJoinData() {}
+}
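
Each record in GRADES_INPUT and SALARIES_INPUT above is a comma-separated
triple whose first field is a time index that the test's Parser (in
WindowJoinITCase below) ignores. A minimal standalone sketch, mirroring that
Parser (the class name here is hypothetical), of how one grade record maps to
a (name, grade) pair:

    import org.apache.flink.api.java.tuple.Tuple2;

    public class GradeRecordSketch {
        public static void main(String[] args) {
            // "0,john,5" = (time index, name, grade); the time index is
            // dropped and only (name, grade) is kept, as in Parser.map().
            String[] fields = "0,john,5".split(",");
            Tuple2<String, Integer> grade =
                    new Tuple2<>(fields[1], Integer.parseInt(fields[2]));
            System.out.println(grade); // prints (john,5)
        }
    }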

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/join/WindowJoinITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/join/WindowJoinITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/join/WindowJoinITCase.java
new file mode 100644
index 0000000..525ff6f
--- /dev/null
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/join/WindowJoinITCase.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.streaming.test.examples.join;
+
+import org.apache.flink.api.common.functions.MapFunction;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.core.fs.FileSystem.WriteMode;
+import org.apache.flink.streaming.api.TimeCharacteristic;
+import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.streaming.examples.join.WindowJoin;
+import org.apache.flink.streaming.util.StreamingMultipleProgramsTestBase;
+
+import org.apache.commons.io.FileUtils;
+import org.junit.Test;
+
+import java.io.File;
+
+/**
+ * Tests for {@link WindowJoin}.
+ */
+@SuppressWarnings("serial")
+public class WindowJoinITCase extends StreamingMultipleProgramsTestBase {
+
+	@Test
+	public void testProgram() throws Exception {
+		final String resultPath = File.createTempFile("result-path", "dir").toURI().toString();
+		try {
+			final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
+			env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);
+
+			DataStream<Tuple2<String, Integer>> grades = env
+					.fromElements(WindowJoinData.GRADES_INPUT.split("\n"))
+					.map(new Parser());
+
+			DataStream<Tuple2<String, Integer>> salaries = env
+					.fromElements(WindowJoinData.SALARIES_INPUT.split("\n"))
+					.map(new Parser());
+
+			WindowJoin
+					.runWindowJoin(grades, salaries, 100)
+					.writeAsText(resultPath, WriteMode.OVERWRITE);
+
+			env.execute();
+
+			// since the two sides of the join might have different speeds,
+			// the exact output cannot be checked; only its well-formedness can,
+			// i.e. that the result lines look like e.g. (bob,2,2015)
+			checkLinesAgainstRegexp(resultPath, "^\\([a-z]+,(\\d),(\\d)+\\)");
+		}
+		finally {
+			try {
+				FileUtils.deleteDirectory(new File(resultPath));
+			} catch (Throwable ignored) {}
+		}
+	}
+
+	//-------------------------------------------------------------------------
+
+	private static final class Parser implements MapFunction<String, Tuple2<String, Integer>> {
+
+		@Override
+		public Tuple2<String, Integer> map(String value) throws Exception {
+			String[] fields = value.split(",");
+			return new Tuple2<>(fields[1], Integer.parseInt(fields[2]));
+		}
+	}
+}
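
The check at the end of testProgram above validates only well-formedness via
checkLinesAgainstRegexp. A minimal standalone sketch (hypothetical class name,
outside the test harness) of what that pattern accepts and rejects:

    import java.util.regex.Pattern;

    public class JoinOutputPatternSketch {
        public static void main(String[] args) {
            // Same pattern string the test passes to checkLinesAgainstRegexp:
            // a lowercase name, a single-digit grade, a salary of one or more digits.
            Pattern wellFormed = Pattern.compile("^\\([a-z]+,(\\d),(\\d)+\\)");
            System.out.println(wellFormed.matcher("(bob,2,2015)").find()); // true
            System.out.println(wellFormed.matcher("(bob,2015,2)").find()); // false: grade is not a single digit
        }
    }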

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/ml/IncrementalLearningSkeletonITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/ml/IncrementalLearningSkeletonITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/ml/IncrementalLearningSkeletonITCase.java
new file mode 100644
index 0000000..d5b160d
--- /dev/null
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/ml/IncrementalLearningSkeletonITCase.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.streaming.test.examples.ml;
+
+import org.apache.flink.streaming.examples.ml.IncrementalLearningSkeleton;
+import org.apache.flink.streaming.examples.ml.util.IncrementalLearningSkeletonData;
+import org.apache.flink.streaming.util.StreamingProgramTestBase;
+
+/**
+ * Tests for {@link IncrementalLearningSkeleton}.
+ */
+public class IncrementalLearningSkeletonITCase extends StreamingProgramTestBase {
+
+	protected String resultPath;
+
+	@Override
+	protected void preSubmit() throws Exception {
+		resultPath = getTempDirPath("result");
+	}
+
+	@Override
+	protected void postSubmit() throws Exception {
+		compareResultsByLinesInMemory(IncrementalLearningSkeletonData.RESULTS, resultPath);
+	}
+
+	@Override
+	protected void testProgram() throws Exception {
+		IncrementalLearningSkeleton.main(new String[]{"--output", resultPath});
+	}
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/twitter/TwitterStreamITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/twitter/TwitterStreamITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/twitter/TwitterStreamITCase.java
new file mode 100644
index 0000000..7f3b440
--- /dev/null
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/twitter/TwitterStreamITCase.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.streaming.test.examples.twitter;
+
+import org.apache.flink.streaming.examples.twitter.TwitterExample;
+import org.apache.flink.streaming.examples.twitter.util.TwitterExampleData;
+import org.apache.flink.streaming.util.StreamingProgramTestBase;
+
+/**
+ * Tests for {@link TwitterExample}.
+ */
+public class TwitterStreamITCase extends StreamingProgramTestBase {
+	protected String resultPath;
+
+	@Override
+	protected void preSubmit() throws Exception {
+		resultPath = getTempDirPath("result");
+	}
+
+	@Override
+	protected void postSubmit() throws Exception {
+		compareResultsByLinesInMemory(TwitterExampleData.STREAMING_COUNTS_AS_TUPLES, resultPath);
+	}
+
+	@Override
+	protected void testProgram() throws Exception {
+		TwitterExample.main(new String[]{"--output", resultPath});
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/windowing/SessionWindowingITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/windowing/SessionWindowingITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/windowing/SessionWindowingITCase.java
new file mode 100644
index 0000000..768ed11
--- /dev/null
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/windowing/SessionWindowingITCase.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.streaming.test.examples.windowing;
+
+import org.apache.flink.streaming.examples.windowing.SessionWindowing;
+import org.apache.flink.streaming.examples.windowing.util.SessionWindowingData;
+import org.apache.flink.streaming.util.StreamingProgramTestBase;
+
+/**
+ * Tests for {@link SessionWindowing}.
+ */
+public class SessionWindowingITCase extends StreamingProgramTestBase {
+
+	protected String resultPath;
+
+	@Override
+	protected void preSubmit() throws Exception {
+		resultPath = getTempDirPath("result");
+	}
+
+	@Override
+	protected void postSubmit() throws Exception {
+		compareResultsByLinesInMemory(SessionWindowingData.EXPECTED, resultPath);
+	}
+
+	@Override
+	protected void testProgram() throws Exception {
+		SessionWindowing.main(new String[]{"--output", resultPath});
+	}
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/windowing/TopSpeedWindowingExampleITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/windowing/TopSpeedWindowingExampleITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/windowing/TopSpeedWindowingExampleITCase.java
new file mode 100644
index 0000000..0d869b8
--- /dev/null
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/windowing/TopSpeedWindowingExampleITCase.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.streaming.test.examples.windowing;
+
+import org.apache.flink.streaming.examples.windowing.TopSpeedWindowing;
+import org.apache.flink.streaming.examples.windowing.util.TopSpeedWindowingExampleData;
+import org.apache.flink.streaming.util.StreamingProgramTestBase;
+
+/**
+ * Tests for {@link TopSpeedWindowing}.
+ */
+public class TopSpeedWindowingExampleITCase extends StreamingProgramTestBase {
+
+	protected String textPath;
+	protected String resultPath;
+
+	@Override
+	protected void preSubmit() throws Exception {
+		setParallelism(1); // needed to ensure total ordering for windows
+		textPath = createTempFile("text.txt", TopSpeedWindowingExampleData.CAR_DATA);
+		resultPath = getTempDirPath("result");
+	}
+
+	@Override
+	protected void postSubmit() throws Exception {
+		compareResultsByLinesInMemory(TopSpeedWindowingExampleData.TOP_SPEEDS, resultPath);
+	}
+
+	@Override
+	protected void testProgram() throws Exception {
+		TopSpeedWindowing.main(new String[]{
+				"--input", textPath,
+				"--output", resultPath});
+	}
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/windowing/WindowWordCountITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/windowing/WindowWordCountITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/windowing/WindowWordCountITCase.java
new file mode 100644
index 0000000..0025d94
--- /dev/null
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/windowing/WindowWordCountITCase.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.streaming.test.examples.windowing;
+
+import org.apache.flink.streaming.examples.windowing.WindowWordCount;
+import org.apache.flink.streaming.util.StreamingProgramTestBase;
+import org.apache.flink.test.testdata.WordCountData;
+
+/**
+ * Tests for {@link WindowWordCount}.
+ */
+public class WindowWordCountITCase extends StreamingProgramTestBase {
+
+	protected String textPath;
+	protected String resultPath;
+	protected String windowSize = "250";
+	protected String slideSize = "150";
+
+	@Override
+	protected void preSubmit() throws Exception {
+		textPath = createTempFile("text.txt", WordCountData.TEXT);
+		resultPath = getTempDirPath("result");
+	}
+
+	@Override
+	protected void postSubmit() throws Exception {
+		// since the parallel tokenizers might have different speeds,
+		// the exact output cannot be checked; only its well-formedness can,
+		// i.e. that the result lines look like e.g. (faust,2)
+		checkLinesAgainstRegexp(resultPath, "^\\([a-z]+,(\\d)+\\)");
+	}
+
+	@Override
+	protected void testProgram() throws Exception {
+		WindowWordCount.main(new String[]{
+				"--input", textPath,
+				"--output", resultPath,
+				"--window", windowSize,
+				"--slide", slideSize});
+	}
+}
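
As in WindowJoinITCase, only well-formedness is checked here. A minimal
standalone sketch (hypothetical class name) of the word-count pattern:

    import java.util.regex.Pattern;

    public class WordCountPatternSketch {
        public static void main(String[] args) {
            // Same pattern string the test passes to checkLinesAgainstRegexp:
            // a lowercase word followed by a count of one or more digits.
            Pattern wellFormed = Pattern.compile("^\\([a-z]+,(\\d)+\\)");
            System.out.println(wellFormed.matcher("(faust,2)").find());  // true
            System.out.println(wellFormed.matcher("(Faust, 2)").find()); // false: uppercase 'F' (the space would also fail)
        }
    }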

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/wordcount/PojoExampleITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/wordcount/PojoExampleITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/wordcount/PojoExampleITCase.java
new file mode 100644
index 0000000..76adb0d
--- /dev/null
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/wordcount/PojoExampleITCase.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.streaming.test.examples.wordcount;
+
+import org.apache.flink.streaming.examples.wordcount.PojoExample;
+import org.apache.flink.streaming.util.StreamingProgramTestBase;
+import org.apache.flink.test.testdata.WordCountData;
+
+/**
+ * Tests for {@link PojoExample}.
+ */
+public class PojoExampleITCase extends StreamingProgramTestBase {
+
+	protected String textPath;
+	protected String resultPath;
+
+	@Override
+	protected void preSubmit() throws Exception {
+		textPath = createTempFile("text.txt", WordCountData.TEXT);
+		resultPath = getTempDirPath("result");
+	}
+
+	@Override
+	protected void postSubmit() throws Exception {
+		compareResultsByLinesInMemory(WordCountData.STREAMING_COUNTS_AS_TUPLES, resultPath);
+	}
+
+	@Override
+	protected void testProgram() throws Exception {
+		PojoExample.main(new String[]{
+				"--input", textPath,
+				"--output", resultPath});
+	}
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/wordcount/WordCountITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/wordcount/WordCountITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/wordcount/WordCountITCase.java
new file mode 100644
index 0000000..96dfafb
--- /dev/null
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/examples/wordcount/WordCountITCase.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.streaming.test.examples.wordcount;
+
+import org.apache.flink.streaming.examples.wordcount.WordCount;
+import org.apache.flink.streaming.util.StreamingProgramTestBase;
+import org.apache.flink.test.testdata.WordCountData;
+
+/**
+ * Tests for {@link WordCount}.
+ */
+public class WordCountITCase extends StreamingProgramTestBase {
+
+	protected String textPath;
+	protected String resultPath;
+
+	@Override
+	protected void preSubmit() throws Exception {
+		textPath = createTempFile("text.txt", WordCountData.TEXT);
+		resultPath = getTempDirPath("result");
+	}
+
+	@Override
+	protected void postSubmit() throws Exception {
+		compareResultsByLinesInMemory(WordCountData.STREAMING_COUNTS_AS_TUPLES, resultPath);
+	}
+
+	@Override
+	protected void testProgram() throws Exception {
+		WordCount.main(new String[]{
+				"--input", textPath,
+				"--output", resultPath});
+	}
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/scala/examples/windowing/TopSpeedWindowingExampleITCase.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/scala/examples/windowing/TopSpeedWindowingExampleITCase.java b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/scala/examples/windowing/TopSpeedWindowingExampleITCase.java
new file mode 100644
index 0000000..8dbfe88
--- /dev/null
+++ b/flink-examples/flink-examples-streaming/src/test/java/org/apache/flink/streaming/test/scala/examples/windowing/TopSpeedWindowingExampleITCase.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.streaming.test.scala.examples.windowing;
+
+import org.apache.flink.streaming.examples.windowing.util.TopSpeedWindowingExampleData;
+import org.apache.flink.streaming.scala.examples.windowing.TopSpeedWindowing;
+import org.apache.flink.streaming.util.StreamingProgramTestBase;
+
+/**
+ * Tests for {@link TopSpeedWindowing}.
+ */
+public class TopSpeedWindowingExampleITCase extends StreamingProgramTestBase {
+	protected String textPath;
+	protected String resultPath;
+
+	@Override
+	protected void preSubmit() throws Exception {
+		setParallelism(1); // needed to ensure total ordering for windows
+		textPath = createTempFile("text.txt", TopSpeedWindowingExampleData.CAR_DATA);
+		resultPath = getTempDirPath("result");
+	}
+
+	@Override
+	protected void postSubmit() throws Exception {
+		compareResultsByLinesInMemory(TopSpeedWindowingExampleData.TOP_CASE_CLASS_SPEEDS, resultPath);
+	}
+
+	@Override
+	protected void testProgram() throws Exception {
+		TopSpeedWindowing.main(new String[]{
+				"--input", textPath,
+				"--output", resultPath});
+
+	}
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/1cd0ee74/flink-examples/flink-examples-streaming/src/test/scala/org/apache/flink/streaming/scala/examples/WindowJoinITCase.scala
----------------------------------------------------------------------
diff --git a/flink-examples/flink-examples-streaming/src/test/scala/org/apache/flink/streaming/scala/examples/WindowJoinITCase.scala b/flink-examples/flink-examples-streaming/src/test/scala/org/apache/flink/streaming/scala/examples/WindowJoinITCase.scala
index 93f262d..0e67be5 100644
--- a/flink-examples/flink-examples-streaming/src/test/scala/org/apache/flink/streaming/scala/examples/WindowJoinITCase.scala
+++ b/flink-examples/flink-examples-streaming/src/test/scala/org/apache/flink/streaming/scala/examples/WindowJoinITCase.scala
@@ -26,7 +26,7 @@ import org.apache.flink.streaming.api.TimeCharacteristic
 import org.apache.flink.streaming.api.scala._
 import org.apache.flink.streaming.scala.examples.join.WindowJoin
 import org.apache.flink.streaming.scala.examples.join.WindowJoin.{Grade, Salary}
-import org.apache.flink.streaming.test.exampleJavaPrograms.join.WindowJoinData
+import org.apache.flink.streaming.test.examples.join.WindowJoinData
 import org.apache.flink.streaming.util.StreamingMultipleProgramsTestBase
 import org.apache.flink.test.util.TestBaseUtils
 import org.junit.Test