Posted to commits@flink.apache.org by dw...@apache.org on 2017/07/25 08:19:03 UTC

[1/6] flink git commit: [FLINK-7181] Activate checkstyle flink-java/operators/*

Repository: flink
Updated Branches:
  refs/heads/master 8bee54395 -> 53d6582d3


http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/UnsortedGrouping.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/UnsortedGrouping.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/UnsortedGrouping.java
index 2ad133c..ccd6139 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/UnsortedGrouping.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/UnsortedGrouping.java
@@ -38,23 +38,27 @@ import org.apache.flink.api.java.typeutils.TupleTypeInfo;
 import org.apache.flink.api.java.typeutils.TypeExtractor;
 import org.apache.flink.util.Preconditions;
 
+/**
+ * A {@link Grouping} that is unsorted.
+ * @param <T> The type of the elements of the grouping.
+ */
 @Public
 public class UnsortedGrouping<T> extends Grouping<T> {
 
 	public UnsortedGrouping(DataSet<T> set, Keys<T> keys) {
 		super(set, keys);
 	}
-	
+
 	/**
 	 * Uses a custom partitioner for the grouping.
-	 * 
+	 *
 	 * @param partitioner The custom partitioner.
 	 * @return The grouping object itself, to allow for method chaining.
 	 */
 	public UnsortedGrouping<T> withPartitioner(Partitioner<?> partitioner) {
 		Preconditions.checkNotNull(partitioner);
 		getKeys().validateCustomPartitioner(partitioner, null);
-		
+
 		this.customPartitioner = partitioner;
 		return this;
 	}
@@ -62,18 +66,19 @@ public class UnsortedGrouping<T> extends Grouping<T> {
 	// --------------------------------------------------------------------------------------------
 	//  Operations / Transformations
 	// --------------------------------------------------------------------------------------------
-	
+
 	/**
-	 * Applies an Aggregate transformation on a grouped {@link org.apache.flink.api.java.tuple.Tuple} {@link DataSet}.<br>
-	 * <b>Note: Only Tuple DataSets can be aggregated.</b>
-	 * The transformation applies a built-in {@link Aggregations Aggregation} on a specified field 
-	 *   of a Tuple group. Additional aggregation functions can be added to the resulting 
+	 * Applies an Aggregate transformation on a grouped {@link org.apache.flink.api.java.tuple.Tuple} {@link DataSet}.
+	 *
+	 * <p><b>Note: Only Tuple DataSets can be aggregated.</b>
+	 * The transformation applies a built-in {@link Aggregations Aggregation} on a specified field
+	 *   of a Tuple group. Additional aggregation functions can be added to the resulting
 	 *   {@link AggregateOperator} by calling {@link AggregateOperator#and(Aggregations, int)}.
-	 * 
+	 *
 	 * @param agg The built-in aggregation function that is computed.
 	 * @param field The index of the Tuple field on which the aggregation function is applied.
-	 * @return An AggregateOperator that represents the aggregated DataSet. 
-	 * 
+	 * @return An AggregateOperator that represents the aggregated DataSet.
+	 *
 	 * @see org.apache.flink.api.java.tuple.Tuple
 	 * @see Aggregations
 	 * @see AggregateOperator
@@ -82,14 +87,14 @@ public class UnsortedGrouping<T> extends Grouping<T> {
 	public AggregateOperator<T> aggregate(Aggregations agg, int field) {
 		return aggregate(agg, field, Utils.getCallLocationName());
 	}
-	
+
 	// private helper that allows setting a different call location name
 	private AggregateOperator<T> aggregate(Aggregations agg, int field, String callLocationName) {
 		return new AggregateOperator<T>(this, agg, field, callLocationName);
 	}
 
 	/**
-	 * Syntactic sugar for aggregate (SUM, field)
+	 * Syntactic sugar for aggregate (SUM, field).
 	 * @param field The index of the Tuple field on which the aggregation function is applied.
 	 * @return An AggregateOperator that represents the summed DataSet.
 	 *
@@ -100,7 +105,7 @@ public class UnsortedGrouping<T> extends Grouping<T> {
 	}
 
 	/**
-	 * Syntactic sugar for aggregate (MAX, field)
+	 * Syntactic sugar for aggregate (MAX, field).
 	 * @param field The index of the Tuple field on which the aggregation function is applied.
 	 * @return An AggregateOperator that represents the max'ed DataSet.
 	 *
@@ -111,7 +116,7 @@ public class UnsortedGrouping<T> extends Grouping<T> {
 	}
 
 	/**
-	 * Syntactic sugar for aggregate (MIN, field)
+	 * Syntactic sugar for aggregate (MIN, field).
 	 * @param field The index of the Tuple field on which the aggregation function is applied.
 	 * @return An AggregateOperator that represents the min'ed DataSet.
 	 *
@@ -120,16 +125,17 @@ public class UnsortedGrouping<T> extends Grouping<T> {
 	public AggregateOperator<T> min (int field) {
 		return this.aggregate (Aggregations.MIN, field, Utils.getCallLocationName());
 	}
-	
+
 	/**
-	 * Applies a Reduce transformation on a grouped {@link DataSet}.<br>
-	 * For each group, the transformation consecutively calls a {@link org.apache.flink.api.common.functions.RichReduceFunction}
-	 *   until only a single element for each group remains. 
+	 * Applies a Reduce transformation on a grouped {@link DataSet}.
+	 *
+	 * <p>For each group, the transformation consecutively calls a {@link org.apache.flink.api.common.functions.RichReduceFunction}
+	 *   until only a single element for each group remains.
 	 * A ReduceFunction combines two elements into one new element of the same type.
-	 * 
+	 *
 	 * @param reducer The ReduceFunction that is applied on each group of the DataSet.
 	 * @return A ReduceOperator that represents the reduced DataSet.
-	 * 
+	 *
 	 * @see org.apache.flink.api.common.functions.RichReduceFunction
 	 * @see ReduceOperator
 	 * @see DataSet
@@ -140,16 +146,17 @@ public class UnsortedGrouping<T> extends Grouping<T> {
 		}
 		return new ReduceOperator<T>(this, inputDataSet.clean(reducer), Utils.getCallLocationName());
 	}
-	
+
 	/**
-	 * Applies a GroupReduce transformation on a grouped {@link DataSet}.<br>
-	 * The transformation calls a {@link org.apache.flink.api.common.functions.RichGroupReduceFunction} for each group of the DataSet.
+	 * Applies a GroupReduce transformation on a grouped {@link DataSet}.
+	 *
+	 * <p>The transformation calls a {@link org.apache.flink.api.common.functions.RichGroupReduceFunction} for each group of the DataSet.
 	 * A GroupReduceFunction can iterate over all elements of a group and emit any
 	 *   number of output elements including none.
-	 * 
+	 *
 	 * @param reducer The GroupReduceFunction that is applied on each group of the DataSet.
 	 * @return A GroupReduceOperator that represents the reduced DataSet.
-	 * 
+	 *
 	 * @see org.apache.flink.api.common.functions.RichGroupReduceFunction
 	 * @see GroupReduceOperator
 	 * @see DataSet
@@ -187,72 +194,77 @@ public class UnsortedGrouping<T> extends Grouping<T> {
 	}
 
 	/**
-	 * Returns a new set containing the first n elements in this grouped {@link DataSet}.<br>
+	 * Returns a new set containing the first n elements in this grouped {@link DataSet}.
+	 *
 	 * @param n The desired number of elements for each group.
 	 * @return A GroupReduceOperator that represents the DataSet containing the elements.
 	 */
 	public GroupReduceOperator<T, T> first(int n) {
-		if(n < 1) {
+		if (n < 1) {
 			throw new InvalidProgramException("Parameter n of first(n) must be at least 1.");
 		}
-		
+
 		return reduceGroup(new FirstReducer<T>(n));
 	}
 
 	/**
-	 * Applies a special case of a reduce transformation (minBy) on a grouped {@link DataSet}.<br>
-	 * The transformation consecutively calls a {@link ReduceFunction} 
+	 * Applies a special case of a reduce transformation (minBy) on a grouped {@link DataSet}.
+	 *
+	 * <p>The transformation consecutively calls a {@link ReduceFunction}
 	 * until only a single element remains which is the result of the transformation.
 	 * A ReduceFunction combines two elements into one new element of the same type.
-	 *  
+	 *
 	 * @param fields Keys taken into account for finding the minimum.
 	 * @return A {@link ReduceOperator} representing the minimum.
 	 */
 	@SuppressWarnings({ "unchecked", "rawtypes" })
 	public ReduceOperator<T> minBy(int... fields)  {
-		
+
 		// Check for using a tuple
-		if(!this.inputDataSet.getType().isTupleType()) {
+		if (!this.inputDataSet.getType().isTupleType()) {
 			throw new InvalidProgramException("Method minBy(int) only works on tuples.");
 		}
-			
+
 		return new ReduceOperator<T>(this, new SelectByMinFunction(
 				(TupleTypeInfo) this.inputDataSet.getType(), fields), Utils.getCallLocationName());
 	}
-	
+
 	/**
-	 * Applies a special case of a reduce transformation (maxBy) on a grouped {@link DataSet}.<br>
-	 * The transformation consecutively calls a {@link ReduceFunction} 
+	 * Applies a special case of a reduce transformation (maxBy) on a grouped {@link DataSet}.
+	 *
+	 * <p>The transformation consecutively calls a {@link ReduceFunction}
 	 * until only a single element remains which is the result of the transformation.
 	 * A ReduceFunction combines two elements into one new element of the same type.
-	 *  
+	 *
 	 * @param fields Keys taken into account for finding the maximum.
 	 * @return A {@link ReduceOperator} representing the maximum.
 	 */
 	@SuppressWarnings({ "unchecked", "rawtypes" })
 	public ReduceOperator<T> maxBy(int... fields)  {
-		
+
 		// Check for using a tuple
-		if(!this.inputDataSet.getType().isTupleType()) {
+		if (!this.inputDataSet.getType().isTupleType()) {
 			throw new InvalidProgramException("Method maxBy(int) only works on tuples.");
 		}
-			
+
 		return new ReduceOperator<T>(this, new SelectByMaxFunction(
 				(TupleTypeInfo) this.inputDataSet.getType(), fields), Utils.getCallLocationName());
 	}
 	// --------------------------------------------------------------------------------------------
 	//  Group Operations
 	// --------------------------------------------------------------------------------------------
-	
+
 	/**
-	 * Sorts {@link org.apache.flink.api.java.tuple.Tuple} elements within a group on the specified field in the specified {@link Order}.<br>
-	 * <b>Note: Only groups of Tuple elements and Pojos can be sorted.</b><br>
-	 * Groups can be sorted by multiple fields by chaining {@link #sortGroup(int, Order)} calls.
-	 * 
+	 * Sorts {@link org.apache.flink.api.java.tuple.Tuple} elements within a group on the specified field in the specified {@link Order}.
+	 *
+	 * <p><b>Note: Only groups of Tuple elements and Pojos can be sorted.</b>
+	 *
+	 * <p>Groups can be sorted by multiple fields by chaining {@link #sortGroup(int, Order)} calls.
+	 *
 	 * @param field The Tuple field on which the group is sorted.
 	 * @param order The Order in which the specified Tuple field is sorted.
 	 * @return A SortedGrouping with specified order of group element.
-	 * 
+	 *
 	 * @see org.apache.flink.api.java.tuple.Tuple
 	 * @see Order
 	 */
@@ -265,16 +277,18 @@ public class UnsortedGrouping<T> extends Grouping<T> {
 		sg.customPartitioner = getCustomPartitioner();
 		return sg;
 	}
-	
+
 	/**
-	 * Sorts Pojos within a group on the specified field in the specified {@link Order}.<br>
-	 * <b>Note: Only groups of Tuple elements and Pojos can be sorted.</b><br>
-	 * Groups can be sorted by multiple fields by chaining {@link #sortGroup(String, Order)} calls.
-	 * 
+	 * Sorts Pojos within a group on the specified field in the specified {@link Order}.
+	 *
+	 * <p><b>Note: Only groups of Tuple elements and Pojos can be sorted.</b>
+	 *
+	 * <p>Groups can be sorted by multiple fields by chaining {@link #sortGroup(String, Order)} calls.
+	 *
 	 * @param field The Tuple or Pojo field on which the group is sorted.
 	 * @param order The Order in which the specified field is sorted.
 	 * @return A SortedGrouping with specified order of group element.
-	 * 
+	 *
 	 * @see Order
 	 */
 	public SortedGrouping<T> sortGroup(String field, Order order) {
@@ -289,8 +303,9 @@ public class UnsortedGrouping<T> extends Grouping<T> {
 
 	/**
 	 * Sorts elements within a group on a key extracted by the specified {@link org.apache.flink.api.java.functions.KeySelector}
-	 * in the specified {@link Order}.<br>
-	 * Chaining {@link #sortGroup(KeySelector, Order)} calls is not supported.
+	 * in the specified {@link Order}.
+	 *
+	 * <p>Chaining {@link #sortGroup(KeySelector, Order)} calls is not supported.
 	 *
 	 * @param keySelector The KeySelector with which the group is sorted.
 	 * @param order The Order in which the extracted key is sorted.
@@ -308,5 +323,5 @@ public class UnsortedGrouping<T> extends Grouping<T> {
 		sg.customPartitioner = getCustomPartitioner();
 		return sg;
 	}
-	
+
 }
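
For context, the grouped-aggregation API documented above is used roughly as
follows. A minimal, self-contained sketch (class name and sample data are
illustrative, not part of this commit):

    import org.apache.flink.api.common.operators.Order;
    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.tuple.Tuple2;

    public class GroupingExample {
        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

            // (word, count) pairs; the data is made up for illustration
            DataSet<Tuple2<String, Integer>> words = env.fromElements(
                Tuple2.of("a", 1), Tuple2.of("b", 2), Tuple2.of("a", 3));

            // groupBy(0) yields an UnsortedGrouping; sum(1) is the
            // "syntactic sugar for aggregate(SUM, field)" described above
            words.groupBy(0).sum(1).print();

            // sortGroup(...) turns the grouping into a SortedGrouping,
            // here used to keep the largest count per word
            words.groupBy(0).sortGroup(1, Order.DESCENDING).first(1).print();
        }
    }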

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/join/JoinOperatorSetsBase.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/join/JoinOperatorSetsBase.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/join/JoinOperatorSetsBase.java
index a474783..770e0e8 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/join/JoinOperatorSetsBase.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/join/JoinOperatorSetsBase.java
@@ -22,6 +22,7 @@ import org.apache.flink.annotation.Public;
 import org.apache.flink.api.common.InvalidProgramException;
 import org.apache.flink.api.common.functions.FlatJoinFunction;
 import org.apache.flink.api.common.functions.JoinFunction;
+import org.apache.flink.api.common.operators.Keys;
 import org.apache.flink.api.common.operators.base.JoinOperatorBase.JoinHint;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.java.DataSet;
@@ -29,13 +30,13 @@ import org.apache.flink.api.java.Utils;
 import org.apache.flink.api.java.functions.KeySelector;
 import org.apache.flink.api.java.operators.JoinOperator.DefaultJoin;
 import org.apache.flink.api.java.operators.JoinOperator.EquiJoin;
-import org.apache.flink.api.common.operators.Keys;
 import org.apache.flink.api.java.tuple.Tuple;
 import org.apache.flink.api.java.typeutils.TypeExtractor;
 
 /**
- * Intermediate step of an Outer Join transformation. <br>
- * To continue the Join transformation, select the join key of the first input {@link DataSet} by calling
+ * Intermediate step of an Outer Join transformation.
+ *
+ * <p>To continue the Join transformation, select the join key of the first input {@link DataSet} by calling
  * {@link JoinOperatorSetsBase#where(int...)} or
  * {@link JoinOperatorSetsBase#where(KeySelector)}.
  *
@@ -71,9 +72,11 @@ public class JoinOperatorSetsBase<I1, I2> {
 	}
 
 	/**
-	 * Continues a Join transformation. <br>
-	 * Defines the {@link Tuple} fields of the first join {@link DataSet} that should be used as join keys.<br>
-	 * <b>Note: Fields can only be selected as join keys on Tuple DataSets.</b><br>
+	 * Continues a Join transformation.
+	 *
+	 * <p>Defines the {@link Tuple} fields of the first join {@link DataSet} that should be used as join keys.
+	 *
+	 * <p><b>Note: Fields can only be selected as join keys on Tuple DataSets.</b>
 	 *
 	 * @param fields The indexes of the Tuple fields of the first join DataSet that should be used as keys.
 	 * @return An incomplete Join transformation.
@@ -89,8 +92,9 @@ public class JoinOperatorSetsBase<I1, I2> {
 	}
 
 	/**
-	 * Continues a Join transformation. <br>
-	 * Defines the fields of the first join {@link DataSet} that should be used as grouping keys. Fields
+	 * Continues a Join transformation.
+	 *
+	 * <p>Defines the fields of the first join {@link DataSet} that should be used as grouping keys. Fields
 	 * are the names of member fields of the underlying type of the data set.
 	 *
 	 * @param fields The fields of the first join DataSet that should be used as keys.
@@ -107,9 +111,10 @@ public class JoinOperatorSetsBase<I1, I2> {
 	}
 
 	/**
-	 * Continues a Join transformation and defines a {@link KeySelector} function for the first join {@link DataSet}.<br>
-	 * The KeySelector function is called for each element of the first DataSet and extracts a single
-	 * key value on which the DataSet is joined. <br>
+	 * Continues a Join transformation and defines a {@link KeySelector} function for the first join {@link DataSet}.
+	 *
+	 * <p>The KeySelector function is called for each element of the first DataSet and extracts a single
+	 * key value on which the DataSet is joined.
 	 *
 	 * @param keySelector The KeySelector function which extracts the key values from the DataSet on which it is joined.
 	 * @return An incomplete Join transformation.
@@ -125,10 +130,10 @@ public class JoinOperatorSetsBase<I1, I2> {
 		return new JoinOperatorSetsPredicateBase(new Keys.SelectorFunctionKeys<>(keySelector, input1.getType(), keyType));
 	}
 
-
 	/**
-	 * Intermediate step of a Join transformation. <br>
-	 * To continue the Join transformation, select the join key of the second input {@link DataSet} by calling
+	 * Intermediate step of a Join transformation.
+	 *
+	 * <p>To continue the Join transformation, select the join key of the second input {@link DataSet} by calling
 	 * {@link org.apache.flink.api.java.operators.join.JoinOperatorSetsBase.JoinOperatorSetsPredicateBase#equalTo(int...)} or
 	 * {@link org.apache.flink.api.java.operators.join.JoinOperatorSetsBase.JoinOperatorSetsPredicateBase#equalTo(KeySelector)}.
 	 *
@@ -151,10 +156,11 @@ public class JoinOperatorSetsBase<I1, I2> {
 
 		/**
 		 * Continues a Join transformation and defines the {@link Tuple} fields of the second join
-		 * {@link DataSet} that should be used as join keys.<br>
-		 * <b>Note: Fields can only be selected as join keys on Tuple DataSets.</b><br>
+		 * {@link DataSet} that should be used as join keys.
 		 *
-		 * The resulting {@link JoinFunctionAssigner} needs to be finished by providing a
+		 * <p><b>Note: Fields can only be selected as join keys on Tuple DataSets.</b>
+		 *
+		 * <p>The resulting {@link JoinFunctionAssigner} needs to be finished by providing a
 		 * {@link JoinFunction} by calling {@link JoinFunctionAssigner#with(JoinFunction)}
 		 *
 		 * @param fields The indexes of the Tuple fields of the second join DataSet that should be used as keys.
@@ -166,9 +172,9 @@ public class JoinOperatorSetsBase<I1, I2> {
 
 		/**
 		 * Continues a Join transformation and defines the fields of the second join
-		 * {@link DataSet} that should be used as join keys.<br>
+		 * {@link DataSet} that should be used as join keys.
 		 *
-		 * The resulting {@link JoinFunctionAssigner} needs to be finished by providing a
+		 * <p>The resulting {@link JoinFunctionAssigner} needs to be finished by providing a
 		 * {@link JoinFunction} by calling {@link JoinFunctionAssigner#with(JoinFunction)}
 		 *
 		 * @param fields The fields of the second join DataSet that should be used as keys.
@@ -179,11 +185,12 @@ public class JoinOperatorSetsBase<I1, I2> {
 		}
 
 		/**
-		 * Continues a Join transformation and defines a {@link KeySelector} function for the second join {@link DataSet}.<br>
-		 * The KeySelector function is called for each element of the second DataSet and extracts a single
-		 * key value on which the DataSet is joined. <br>
+		 * Continues a Join transformation and defines a {@link KeySelector} function for the second join {@link DataSet}.
+		 *
+		 * <p>The KeySelector function is called for each element of the second DataSet and extracts a single
+		 * key value on which the DataSet is joined.
 		 *
-		 * The resulting {@link JoinFunctionAssigner} needs to be finished by providing a
+		 * <p>The resulting {@link JoinFunctionAssigner} needs to be finished by providing a
 		 * {@link JoinFunction} by calling {@link JoinFunctionAssigner#with(JoinFunction)}
 		 *
 		 * @param keySelector The KeySelector function which extracts the key values from the second DataSet on which it is joined.
@@ -211,7 +218,7 @@ public class JoinOperatorSetsBase<I1, I2> {
 			try {
 				keys1.areCompatible(keys2);
 			} catch (Keys.IncompatibleKeysException e) {
-				throw new InvalidProgramException("The pair of join keys are not compatible with each other.",e);
+				throw new InvalidProgramException("The pair of join keys are not compatible with each other.", e);
 			}
 			return new DefaultJoin<>(input1, input2, keys1, keys2, joinHint, Utils.getCallLocationName(4), joinType);
 		}
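
JoinOperatorSetsBase is the intermediate object behind the outer-join entry
points (leftOuterJoin/rightOuterJoin/fullOuterJoin). A sketch of the
where/equalTo/with flow described in the Javadoc above (names and data are
illustrative):

    import org.apache.flink.api.common.functions.JoinFunction;
    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.tuple.Tuple2;

    public class OuterJoinExample {
        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

            DataSet<Tuple2<Integer, String>> left = env.fromElements(
                Tuple2.of(1, "a"), Tuple2.of(2, "b"));
            DataSet<Tuple2<Integer, Double>> right = env.fromElements(
                Tuple2.of(1, 1.0), Tuple2.of(3, 3.0));

            // where(...) selects the key of the first input, equalTo(...)
            // the key of the second; an outer join must be finished with a
            // JoinFunction, since either side of a pair may be null
            left.fullOuterJoin(right)
                .where(0)
                .equalTo(0)
                .with(new JoinFunction<Tuple2<Integer, String>, Tuple2<Integer, Double>, String>() {
                    @Override
                    public String join(Tuple2<Integer, String> l, Tuple2<Integer, Double> r) {
                        return (l == null ? "-" : l.f1) + "|" + (r == null ? "-" : r.f1.toString());
                    }
                })
                .print();
        }
    }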

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/join/JoinType.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/join/JoinType.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/join/JoinType.java
index 58866de..2a668a8 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/join/JoinType.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/join/JoinType.java
@@ -20,6 +20,9 @@ package org.apache.flink.api.java.operators.join;
 
 import org.apache.flink.annotation.Public;
 
+/**
+ * Join types.
+ */
 @Public
 public enum JoinType {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/test/java/org/apache/flink/api/java/operators/NamesTest.java
----------------------------------------------------------------------
diff --git a/flink-java/src/test/java/org/apache/flink/api/java/operators/NamesTest.java b/flink-java/src/test/java/org/apache/flink/api/java/operators/NamesTest.java
index 09a705c..5597b8f 100644
--- a/flink-java/src/test/java/org/apache/flink/api/java/operators/NamesTest.java
+++ b/flink-java/src/test/java/org/apache/flink/api/java/operators/NamesTest.java
@@ -18,11 +18,6 @@
 
 package org.apache.flink.api.java.operators;
 
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
 import org.apache.flink.api.common.Plan;
 import org.apache.flink.api.common.functions.FilterFunction;
 import org.apache.flink.api.common.functions.FlatJoinFunction;
@@ -35,9 +30,15 @@ import org.apache.flink.api.java.operators.translation.PlanFilterOperator;
 import org.apache.flink.api.java.tuple.Tuple1;
 import org.apache.flink.util.Collector;
 import org.apache.flink.util.Visitor;
+
 import org.junit.Assert;
 import org.junit.Test;
 
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
 /**
  * Test proper automated assignment of the transformation's name, if not set by the user.
  */
@@ -50,7 +51,6 @@ public class NamesTest implements Serializable {
 
 		DataSet<String> strs = env.fromCollection(Arrays.asList("a", "b"));
 
-
 		// WARNING: The test will fail if this line is moved down in the file (the line number is hard-coded)
 		strs.filter(new FilterFunction<String>() {
 			private static final long serialVersionUID = 1L;
@@ -92,8 +92,7 @@ public class NamesTest implements Serializable {
 		DataSet<Tuple1<String>> strs1 = env.fromCollection(strLi);
 		strs.join(strs1).where(0).equalTo(0).with(new FlatJoinFunction<Tuple1<String>, Tuple1<String>, String>() {
 			@Override
-			public void join(Tuple1<String> first, Tuple1<String> second,
-							 Collector<String> out) throws Exception {
+			public void join(Tuple1<String> first, Tuple1<String> second, Collector<String> out) throws Exception {
 				//
 			}
 		})
@@ -102,11 +101,12 @@ public class NamesTest implements Serializable {
 		plan.accept(new Visitor<Operator<?>>() {
 			@Override
 			public boolean preVisit(Operator<?> visitable) {
-				if(visitable instanceof InnerJoinOperatorBase) {
+				if (visitable instanceof InnerJoinOperatorBase) {
 					Assert.assertEquals("Join at testJoinWith(NamesTest.java:93)", visitable.getName());
 				}
 				return true;
 			}
+
 			@Override
 			public void postVisit(Operator<?> visitable) {}
 		});
@@ -116,7 +116,7 @@ public class NamesTest implements Serializable {
 		plan.accept(new Visitor<Operator<?>>() {
 			@Override
 			public boolean preVisit(Operator<?> visitable) {
-				if(visitable instanceof PlanFilterOperator<?>) {
+				if (visitable instanceof PlanFilterOperator<?>) {
 					// cast is actually not required. It's just a check for the right element
 					PlanFilterOperator<?> filterOp = (PlanFilterOperator<?>) visitable;
 					Assert.assertEquals(expected, filterOp.getName());
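
NamesTest above relies on operators being named after their call location
when no explicit name is set. A sketch of the behavior it tests (the exact
name string, including the line number, is illustrative):

    import org.apache.flink.api.common.functions.FilterFunction;
    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;

    public class NamingExample {
        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
            DataSet<String> strs = env.fromElements("a", "b");

            // Without an explicit name the operator is named after its call
            // site, e.g. "Filter at main(NamingExample.java:12)", which is
            // why NamesTest hard-codes line numbers.
            strs.filter(new FilterFunction<String>() {
                    @Override
                    public boolean filter(String value) {
                        return value.startsWith("a");
                    }
                })
                .name("MyFilter") // an explicit name overrides the default
                .print();
        }
    }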

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/tools/maven/suppressions-java.xml
----------------------------------------------------------------------
diff --git a/tools/maven/suppressions-java.xml b/tools/maven/suppressions-java.xml
index 3b7d60b..9ddacba 100644
--- a/tools/maven/suppressions-java.xml
+++ b/tools/maven/suppressions-java.xml
@@ -60,10 +60,6 @@ under the License.
 		checks="AvoidStarImport"/>
 
 	<suppress
-		files="(.*)api[/\\]java[/\\]operators[/\\]([^/\\]*\.java)"
-		checks="NewlineAtEndOfFile|RegexpSingleline|TodoComment|RedundantImport|ImportOrder|RedundantModifier|JavadocMethod|JavadocParagraph|JavadocType|JavadocStyle|PackageName|TypeNameCheck|ConstantNameCheck|StaticVariableNameCheck|MemberNameCheck|MethodNameCheck|ParameterName|LocalFinalVariableName|LocalVariableName|LeftCurly|UpperEll|FallThrough|reliefPattern|SimplifyBooleanExpression|EmptyStatement|ModifierOrder|EmptyLineSeparator|WhitespaceAround|WhitespaceAfter|NoWhitespaceAfter|NoWhitespaceBefore|OperatorWrap|ParenPad"/>
-
-	<suppress
 		files="(.*)api[/\\]java[/\\]operator[/\\]([^/\\]*\.java)"
 		checks="NewlineAtEndOfFile|RegexpSingleline|TodoComment|RedundantImport|ImportOrder|RedundantModifier|JavadocMethod|JavadocParagraph|JavadocType|JavadocStyle|PackageName|TypeNameCheck|ConstantNameCheck|StaticVariableNameCheck|MemberNameCheck|MethodNameCheck|ParameterName|LocalFinalVariableName|LocalVariableName|LeftCurly|UpperEll|FallThrough|reliefPattern|SimplifyBooleanExpression|EmptyStatement|ModifierOrder|EmptyLineSeparator|WhitespaceAround|WhitespaceAfter|NoWhitespaceAfter|NoWhitespaceBefore|OperatorWrap|ParenPad"/>
 

[2/6] flink git commit: [FLINK-7181] Activate checkstyle flink-java/operators/*

Posted by dw...@apache.org.
http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/PartitionOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/PartitionOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/PartitionOperator.java
index b3234b8..8a9a72d 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/PartitionOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/PartitionOperator.java
@@ -18,13 +18,13 @@
 
 package org.apache.flink.api.java.operators;
 
-
 import org.apache.flink.annotation.Internal;
 import org.apache.flink.annotation.Public;
 import org.apache.flink.annotation.PublicEvolving;
 import org.apache.flink.api.common.distributions.DataDistribution;
 import org.apache.flink.api.common.functions.Partitioner;
 import org.apache.flink.api.common.operators.Keys;
+import org.apache.flink.api.common.operators.Keys.SelectorFunctionKeys;
 import org.apache.flink.api.common.operators.Operator;
 import org.apache.flink.api.common.operators.Order;
 import org.apache.flink.api.common.operators.Ordering;
@@ -33,7 +33,6 @@ import org.apache.flink.api.common.operators.base.PartitionOperatorBase;
 import org.apache.flink.api.common.operators.base.PartitionOperatorBase.PartitionMethod;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.java.DataSet;
-import org.apache.flink.api.common.operators.Keys.SelectorFunctionKeys;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.util.Preconditions;
 
@@ -46,7 +45,7 @@ import java.util.Arrays;
  */
 @Public
 public class PartitionOperator<T> extends SingleInputOperator<T, T, PartitionOperator<T>> {
-	
+
 	private final Keys<T> pKeys;
 	private final PartitionMethod pMethod;
 	private final String partitionLocationName;
@@ -54,7 +53,6 @@ public class PartitionOperator<T> extends SingleInputOperator<T, T, PartitionOpe
 	private final DataDistribution distribution;
 	private Order[] orders;
 
-
 	public PartitionOperator(DataSet<T> input, PartitionMethod pMethod, Keys<T> pKeys, String partitionLocationName) {
 		this(input, pMethod, pKeys, null, null, null, partitionLocationName);
 	}
@@ -66,37 +64,35 @@ public class PartitionOperator<T> extends SingleInputOperator<T, T, PartitionOpe
 	public PartitionOperator(DataSet<T> input, PartitionMethod pMethod, String partitionLocationName) {
 		this(input, pMethod, null, null, null, null, partitionLocationName);
 	}
-	
+
 	public PartitionOperator(DataSet<T> input, Keys<T> pKeys, Partitioner<?> customPartitioner, String partitionLocationName) {
 		this(input, PartitionMethod.CUSTOM, pKeys, customPartitioner, null, null, partitionLocationName);
 	}
-	
+
 	public <P> PartitionOperator(DataSet<T> input, Keys<T> pKeys, Partitioner<P> customPartitioner,
-			TypeInformation<P> partitionerTypeInfo, String partitionLocationName)
-	{
+			TypeInformation<P> partitionerTypeInfo, String partitionLocationName) {
 		this(input, PartitionMethod.CUSTOM, pKeys, customPartitioner, partitionerTypeInfo, null, partitionLocationName);
 	}
-	
+
 	private <P> PartitionOperator(DataSet<T> input, PartitionMethod pMethod, Keys<T> pKeys, Partitioner<P> customPartitioner,
-			TypeInformation<P> partitionerTypeInfo, DataDistribution distribution, String partitionLocationName)
-	{
+			TypeInformation<P> partitionerTypeInfo, DataDistribution distribution, String partitionLocationName) {
 		super(input, input.getType());
-		
+
 		Preconditions.checkNotNull(pMethod);
 		Preconditions.checkArgument(pKeys != null || pMethod == PartitionMethod.REBALANCE, "Partitioning requires keys");
 		Preconditions.checkArgument(pMethod != PartitionMethod.CUSTOM || customPartitioner != null, "Custom partitioning requires a partitioner.");
 		Preconditions.checkArgument(distribution == null || pMethod == PartitionMethod.RANGE, "Customized data distribution is only necessary for range partition.");
-		
+
 		if (distribution != null) {
 			Preconditions.checkArgument(pKeys.getNumberOfKeyFields() <= distribution.getNumberOfFields(), "The distribution must provide at least as many fields as flat key fields are specified.");
 			Preconditions.checkArgument(Arrays.equals(pKeys.getKeyFieldTypes(), Arrays.copyOfRange(distribution.getKeyTypes(), 0, pKeys.getNumberOfKeyFields())),
 					"The types of the flat key fields must be equal to the types of the fields of the distribution.");
 		}
-		
+
 		if (customPartitioner != null) {
 			pKeys.validateCustomPartitioner(customPartitioner, partitionerTypeInfo);
 		}
-		
+
 		this.pMethod = pMethod;
 		this.pKeys = pKeys;
 		this.partitionLocationName = partitionLocationName;
@@ -121,43 +117,43 @@ public class PartitionOperator<T> extends SingleInputOperator<T, T, PartitionOpe
 
 		return this;
 	}
-	
+
 	// --------------------------------------------------------------------------------------------
 	//  Properties
 	// --------------------------------------------------------------------------------------------
-	
+
 	/**
 	 * Gets the custom partitioner from this partitioning.
-	 * 
+	 *
 	 * @return The custom partitioner.
 	 */
 	@Internal
 	public Partitioner<?> getCustomPartitioner() {
 		return customPartitioner;
 	}
-	
+
 	// --------------------------------------------------------------------------------------------
 	//  Translation
 	// --------------------------------------------------------------------------------------------
-	
+
 	protected org.apache.flink.api.common.operators.SingleInputOperator<?, T, ?> translateToDataFlow(Operator<T> input) {
-	
+
 		String name = "Partition at " + partitionLocationName;
-		
+
 		// distinguish between partition types
 		if (pMethod == PartitionMethod.REBALANCE) {
-			
+
 			UnaryOperatorInformation<T, T> operatorInfo = new UnaryOperatorInformation<>(getType(), getType());
 			PartitionOperatorBase<T> rebalancedInput = new PartitionOperatorBase<>(operatorInfo, pMethod, name);
 			rebalancedInput.setInput(input);
 			rebalancedInput.setParallelism(getParallelism());
-			
+
 			return rebalancedInput;
-		} 
+		}
 		else if (pMethod == PartitionMethod.HASH || pMethod == PartitionMethod.CUSTOM || pMethod == PartitionMethod.RANGE) {
-			
+
 			if (pKeys instanceof Keys.ExpressionKeys) {
-				
+
 				int[] logicalKeyPositions = pKeys.computeLogicalKeyPositions();
 				UnaryOperatorInformation<T, T> operatorInfo = new UnaryOperatorInformation<>(getType(), getType());
 				PartitionOperatorBase<T> partitionedInput = new PartitionOperatorBase<>(operatorInfo, pMethod, logicalKeyPositions, name);
@@ -166,11 +162,11 @@ public class PartitionOperator<T> extends SingleInputOperator<T, T, PartitionOpe
 				partitionedInput.setDistribution(distribution);
 				partitionedInput.setCustomPartitioner(customPartitioner);
 				partitionedInput.setOrdering(computeOrdering(pKeys, orders));
-				
+
 				return partitionedInput;
 			}
 			else if (pKeys instanceof Keys.SelectorFunctionKeys) {
-				
+
 				@SuppressWarnings("unchecked")
 				Keys.SelectorFunctionKeys<T, ?> selectorKeys = (Keys.SelectorFunctionKeys<T, ?>) pKeys;
 				return translateSelectorFunctionPartitioner(selectorKeys, pMethod, name, input, getParallelism(),
@@ -179,8 +175,8 @@ public class PartitionOperator<T> extends SingleInputOperator<T, T, PartitionOpe
 			else {
 				throw new UnsupportedOperationException("Unrecognized key type.");
 			}
-			
-		} 
+
+		}
 		else {
 			throw new UnsupportedOperationException("Unsupported partitioning method: " + pMethod.name());
 		}
@@ -217,8 +213,7 @@ public class PartitionOperator<T> extends SingleInputOperator<T, T, PartitionOpe
 		Operator<T> input,
 		int partitionDop,
 		Partitioner<?> customPartitioner,
-		Order[] orders)
-	{
+		Order[] orders) {
 		final SelectorFunctionKeys<T, K> keys = (SelectorFunctionKeys<T, K>) rawKeys;
 		TypeInformation<Tuple2<K, T>> typeInfoWithKey = KeyFunctions.createTypeWithKey(keys);
 
@@ -234,5 +229,4 @@ public class PartitionOperator<T> extends SingleInputOperator<T, T, PartitionOpe
 		return KeyFunctions.appendKeyRemover(keyedPartitionedInput, keys);
 	}
 
-	
 }
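
The translateToDataFlow above distinguishes the REBALANCE, HASH, RANGE and
CUSTOM partition methods. A short sketch of how each is reached from the
DataSet API (class name and sample data are illustrative):

    import org.apache.flink.api.common.functions.Partitioner;
    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.tuple.Tuple2;

    public class PartitioningExample {
        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
            DataSet<Tuple2<Integer, String>> data = env.fromElements(
                Tuple2.of(1, "a"), Tuple2.of(2, "b"), Tuple2.of(3, "c"));

            // PartitionMethod.REBALANCE: no keys required
            data.rebalance().print();

            // PartitionMethod.HASH and PartitionMethod.RANGE on a key field
            data.partitionByHash(0).print();
            data.partitionByRange(0).print();

            // PartitionMethod.CUSTOM with a user-supplied Partitioner
            data.partitionCustom(new Partitioner<Integer>() {
                @Override
                public int partition(Integer key, int numPartitions) {
                    return key % numPartitions;
                }
            }, 0).print();
        }
    }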

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/ProjectOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/ProjectOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/ProjectOperator.java
index 0068582..d8cc916 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/ProjectOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/ProjectOperator.java
@@ -18,8 +18,6 @@
 
 package org.apache.flink.api.java.operators;
 
-import java.util.Arrays;
-
 import org.apache.flink.annotation.Internal;
 import org.apache.flink.annotation.Public;
 import org.apache.flink.annotation.PublicEvolving;
@@ -31,34 +29,58 @@ import org.apache.flink.api.common.typeutils.CompositeType;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.functions.SemanticPropUtil;
 import org.apache.flink.api.java.operators.translation.PlanProjectOperator;
+import org.apache.flink.api.java.tuple.Tuple;
+import org.apache.flink.api.java.tuple.Tuple1;
+import org.apache.flink.api.java.tuple.Tuple10;
+import org.apache.flink.api.java.tuple.Tuple11;
+import org.apache.flink.api.java.tuple.Tuple12;
+import org.apache.flink.api.java.tuple.Tuple13;
+import org.apache.flink.api.java.tuple.Tuple14;
+import org.apache.flink.api.java.tuple.Tuple15;
+import org.apache.flink.api.java.tuple.Tuple16;
+import org.apache.flink.api.java.tuple.Tuple17;
+import org.apache.flink.api.java.tuple.Tuple18;
+import org.apache.flink.api.java.tuple.Tuple19;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.api.java.tuple.Tuple20;
+import org.apache.flink.api.java.tuple.Tuple21;
+import org.apache.flink.api.java.tuple.Tuple22;
+import org.apache.flink.api.java.tuple.Tuple23;
+import org.apache.flink.api.java.tuple.Tuple24;
+import org.apache.flink.api.java.tuple.Tuple25;
+import org.apache.flink.api.java.tuple.Tuple3;
+import org.apache.flink.api.java.tuple.Tuple4;
+import org.apache.flink.api.java.tuple.Tuple5;
+import org.apache.flink.api.java.tuple.Tuple6;
+import org.apache.flink.api.java.tuple.Tuple7;
+import org.apache.flink.api.java.tuple.Tuple8;
+import org.apache.flink.api.java.tuple.Tuple9;
 import org.apache.flink.api.java.typeutils.TupleTypeInfo;
 import org.apache.flink.util.Preconditions;
 
-//CHECKSTYLE.OFF: AvoidStarImport - Needed for TupleGenerator
-import org.apache.flink.api.java.tuple.*;
-//CHECKSTYLE.ON: AvoidStarImport
+import java.util.Arrays;
 
 /**
  * This operator represents the application of a projection operation on a data set, and the
  * result data set produced by the function.
- * 
+ *
  * @param <IN> The type of the data set projected by the operator.
  * @param <OUT> The type of data set that is the result of the projection.
  */
 @Public
-public class ProjectOperator<IN, OUT extends Tuple> 
+public class ProjectOperator<IN, OUT extends Tuple>
 	extends SingleInputOperator<IN, OUT, ProjectOperator<IN, OUT>> {
-	
+
 	protected final int[] fields;
 
 	public ProjectOperator(DataSet<IN> input, int[] fields, TupleTypeInfo<OUT> returnType) {
 		super(input, returnType);
-	
+
 		this.fields = fields;
 	}
 
 	@Override
-	protected org.apache.flink.api.common.operators.base.MapOperatorBase<IN, OUT, MapFunction<IN,OUT>> translateToDataFlow(Operator<IN> input) {
+	protected org.apache.flink.api.common.operators.base.MapOperatorBase<IN, OUT, MapFunction<IN, OUT>> translateToDataFlow(Operator<IN> input) {
 		String name = getName() != null ? getName() : "Projection " + Arrays.toString(fields);
 		// create operator
 		PlanProjectOperator<IN, OUT> ppo = new PlanProjectOperator<IN, OUT>(fields, name, getInputType(), getResultType(), context.getConfig());
@@ -70,6 +92,7 @@ public class ProjectOperator<IN, OUT extends Tuple>
 
 		return ppo;
 	}
+
 	/**
 	 * @deprecated Deprecated method only kept for compatibility.
 	 */
@@ -77,61 +100,64 @@ public class ProjectOperator<IN, OUT extends Tuple>
 	@Deprecated
 	@PublicEvolving
 	public <R extends Tuple> ProjectOperator<IN, R> types(Class<?>... types) {
-		TupleTypeInfo<R> typeInfo = (TupleTypeInfo<R>)this.getResultType();
+		TupleTypeInfo<R> typeInfo = (TupleTypeInfo<R>) this.getResultType();
 
-		if(types.length != typeInfo.getArity()) {
+		if (types.length != typeInfo.getArity()) {
 			throw new InvalidProgramException("Provided types do not match projection.");
 		}
-		for (int i=0; i<types.length; i++) {
+		for (int i = 0; i < types.length; i++) {
 			Class<?> typeClass = types[i];
 			if (!typeClass.equals(typeInfo.getTypeAt(i).getTypeClass())) {
-				throw new InvalidProgramException("Provided type "+typeClass.getSimpleName()+" at position "+i+" does not match projection");
+				throw new InvalidProgramException("Provided type " + typeClass.getSimpleName() + " at position " + i + " does not match projection");
 			}
 		}
 		return (ProjectOperator<IN, R>) this;
 	}
 
+	/**
+	 * A projection on a {@link DataSet}.
+	 *
+	 * @param <T> The type of the elements of the input DataSet.
+	 */
 	@Internal
 	public static class Projection<T> {
-		
+
 		private final DataSet<T> ds;
 		private int[] fieldIndexes;
-		
+
 		public Projection(DataSet<T> ds, int[] fieldIndexes) {
-			
-			if(!(ds.getType() instanceof TupleTypeInfo)) {
+
+			if (!(ds.getType() instanceof TupleTypeInfo)) {
 				throw new UnsupportedOperationException("project() can only be applied to DataSets of Tuples.");
 			}
-			
-			if(fieldIndexes.length == 0) {
+
+			if (fieldIndexes.length == 0) {
 				throw new IllegalArgumentException("project() needs to select at least one (1) field.");
-			} else if(fieldIndexes.length > Tuple.MAX_ARITY - 1) {
+			} else if (fieldIndexes.length > Tuple.MAX_ARITY - 1) {
 				throw new IllegalArgumentException(
-						"project() may select only up to (" + (Tuple.MAX_ARITY - 1) + ") fields.");
+					"project() may select only up to (" + (Tuple.MAX_ARITY - 1) + ") fields.");
 			}
-			
+
 			int maxFieldIndex = ds.getType().getArity();
 			for (int fieldIndex : fieldIndexes) {
 				Preconditions.checkElementIndex(fieldIndex, maxFieldIndex);
 			}
-			
+
 			this.ds = ds;
 			this.fieldIndexes = fieldIndexes;
 		}
-		
-		
-		// --------------------------------------------------------------------------------------------	
+
+		// --------------------------------------------------------------------------------------------
 		// The following lines are generated.
-		// --------------------------------------------------------------------------------------------	
-		// BEGIN_OF_TUPLE_DEPENDENT_CODE	
-	// GENERATED FROM org.apache.flink.api.java.tuple.TupleGenerator.
+		// --------------------------------------------------------------------------------------------
+		// BEGIN_OF_TUPLE_DEPENDENT_CODE
+		// GENERATED FROM org.apache.flink.api.java.tuple.TupleGenerator.
 
 		/**
 		 * Chooses a projectTupleX according to the length of
-		 * {@link org.apache.flink.api.java.operators.ProjectOperator.Projection#fieldIndexes} 
-		 * 
+		 * {@link org.apache.flink.api.java.operators.ProjectOperator.Projection#fieldIndexes}.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see org.apache.flink.api.java.operators.ProjectOperator.Projection
 		 */
 		@SuppressWarnings("unchecked")
@@ -171,10 +197,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -186,10 +211,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -201,10 +225,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -216,10 +239,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -231,10 +253,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -246,10 +267,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -261,10 +281,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -276,10 +295,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -291,10 +309,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -306,10 +323,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -321,10 +337,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -336,10 +351,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -351,10 +365,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -366,10 +379,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -381,10 +393,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -396,10 +407,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -411,10 +421,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -426,10 +435,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -441,10 +449,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -456,10 +463,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -471,10 +477,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -486,10 +491,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -501,10 +505,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -516,10 +519,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -531,10 +533,9 @@ public class ProjectOperator<IN, OUT extends Tuple>
 		}
 
 		/**
-		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields. 
-		 * 
+		 * Projects a {@link Tuple} {@link DataSet} to the previously selected fields.
+		 *
 		 * @return The projected DataSet.
-		 * 
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -547,20 +548,18 @@ public class ProjectOperator<IN, OUT extends Tuple>
 
 		// END_OF_TUPLE_DEPENDENT_CODE
 		// -----------------------------------------------------------------------------------------
-		
-		
-		
+
 		private TypeInformation<?>[] extractFieldTypes(int[] fields, TypeInformation<?> inType) {
-			
+
 			TupleTypeInfo<?> inTupleType = (TupleTypeInfo<?>) inType;
 			TypeInformation<?>[] fieldTypes = new TypeInformation[fields.length];
-					
-			for(int i=0; i<fields.length; i++) {					
+
+			for (int i = 0; i < fields.length; i++) {
 				fieldTypes[i] = inTupleType.getTypeAt(fields[i]);
 			}
-			
+
 			return fieldTypes;
 		}
-		
+
 	}
 }
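
The generated projectTupleX methods above back DataSet#project. A small
sketch of the usage pattern, where the output tuple type is inferred from
the assignment target (names and data are illustrative):

    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.tuple.Tuple2;
    import org.apache.flink.api.java.tuple.Tuple3;

    public class ProjectionExample {
        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

            DataSet<Tuple3<Integer, String, Double>> in = env.fromElements(
                Tuple3.of(1, "a", 1.0), Tuple3.of(2, "b", 2.0));

            // project(...) builds a Projection and dispatches to the
            // generated projectTupleX method matching the number of
            // selected fields; here fields 1 and 0 become a Tuple2
            DataSet<Tuple2<String, Integer>> out = in.project(1, 0);

            out.print();
        }
    }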

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/ReduceOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/ReduceOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/ReduceOperator.java
index 42dcf05..6d0c58b 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/ReduceOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/ReduceOperator.java
@@ -23,58 +23,56 @@ import org.apache.flink.annotation.Public;
 import org.apache.flink.annotation.PublicEvolving;
 import org.apache.flink.api.common.functions.ReduceFunction;
 import org.apache.flink.api.common.operators.Keys;
+import org.apache.flink.api.common.operators.Keys.SelectorFunctionKeys;
 import org.apache.flink.api.common.operators.Operator;
 import org.apache.flink.api.common.operators.SingleInputSemanticProperties;
 import org.apache.flink.api.common.operators.UnaryOperatorInformation;
 import org.apache.flink.api.common.operators.base.ReduceOperatorBase;
 import org.apache.flink.api.common.operators.base.ReduceOperatorBase.CombineHint;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.functions.SemanticPropUtil;
-import org.apache.flink.api.common.operators.Keys.SelectorFunctionKeys;
 import org.apache.flink.api.java.operators.translation.PlanUnwrappingReduceOperator;
 import org.apache.flink.api.java.tuple.Tuple2;
-import org.apache.flink.api.java.DataSet;
 
 /**
  * This operator represents the application of a "reduce" function on a data set, and the
  * result data set produced by the function.
- * 
+ *
  * @param <IN> The type of the data set reduced by the operator.
- * 
+ *
  * @see org.apache.flink.api.common.functions.ReduceFunction
  */
 @Public
 public class ReduceOperator<IN> extends SingleInputUdfOperator<IN, IN, ReduceOperator<IN>> {
-	
+
 	private final ReduceFunction<IN> function;
-	
+
 	private final Grouping<IN> grouper;
-	
+
 	private final String defaultName;
 
 	// should be null in case of an all reduce
 	private CombineHint hint;
-	
+
 	/**
-	 * 
 	 * This is the constructor for the reduce-all case (in contrast to the reduce-per-group case).
-	 * 
+	 *
 	 * @param input The data set that is reduced.
 	 * @param function The reduce function.
 	 */
 	public ReduceOperator(DataSet<IN> input, ReduceFunction<IN> function, String defaultName) {
 		super(input, input.getType());
-		
+
 		this.function = function;
 		this.grouper = null;
 		this.defaultName = defaultName;
 		this.hint = null;
 	}
-	
-	
+
 	public ReduceOperator(Grouping<IN> input, ReduceFunction<IN> function, String defaultName) {
 		super(input.getInputDataSet(), input.getInputDataSet().getType());
-		
+
 		this.function = function;
 		this.grouper = input;
 		this.defaultName = defaultName;
@@ -82,7 +80,7 @@ public class ReduceOperator<IN> extends SingleInputUdfOperator<IN, IN, ReduceOpe
 
 		UdfOperatorUtils.analyzeSingleInputUdf(this, ReduceFunction.class, defaultName, function, grouper.keys);
 	}
-	
+
 	@Override
 	protected ReduceFunction<IN> getFunction() {
 		return function;
@@ -95,12 +93,12 @@ public class ReduceOperator<IN> extends SingleInputUdfOperator<IN, IN, ReduceOpe
 		SingleInputSemanticProperties props = super.getSemanticProperties();
 
 		// offset semantic information by extracted key fields
-		if(props != null &&
+		if (props != null &&
 				this.grouper != null &&
 				this.grouper.keys instanceof SelectorFunctionKeys) {
 
-			int offset = ((SelectorFunctionKeys<?,?>) this.grouper.keys).getKeyType().getTotalFields();
-			if(this.grouper instanceof SortedGrouping) {
+			int offset = ((SelectorFunctionKeys<?, ?>) this.grouper.keys).getKeyType().getTotalFields();
+			if (this.grouper instanceof SortedGrouping) {
 				offset += ((SortedGrouping<?>) this.grouper).getSortSelectionFunctionKey().getKeyType().getTotalFields();
 			}
 			props = SemanticPropUtil.addSourceFieldOffset(props, this.getInputType().getTotalFields(), offset);
@@ -111,25 +109,25 @@ public class ReduceOperator<IN> extends SingleInputUdfOperator<IN, IN, ReduceOpe
 
 	@Override
 	protected org.apache.flink.api.common.operators.SingleInputOperator<?, IN, ?> translateToDataFlow(Operator<IN> input) {
-		
-		String name = getName() != null ? getName() : "Reduce at "+defaultName;
-		
+
+		String name = getName() != null ? getName() : "Reduce at " + defaultName;
+
 		// distinguish between grouped reduce and non-grouped reduce
 		if (grouper == null) {
 			// non grouped reduce
 			UnaryOperatorInformation<IN, IN> operatorInfo = new UnaryOperatorInformation<>(getInputType(), getInputType());
 			ReduceOperatorBase<IN, ReduceFunction<IN>> po =
 					new ReduceOperatorBase<>(function, operatorInfo, new int[0], name);
-			
+
 			po.setInput(input);
 			// the parallelism for a non grouped reduce can only be 1
 			po.setParallelism(1);
-			
+
 			return po;
 		}
-		
+
 		if (grouper.getKeys() instanceof SelectorFunctionKeys) {
-			
+
 			// reduce with key selector function
 			@SuppressWarnings("unchecked")
 			SelectorFunctionKeys<IN, ?> selectorKeys = (SelectorFunctionKeys<IN, ?>) grouper.getKeys();
@@ -141,19 +139,19 @@ public class ReduceOperator<IN> extends SingleInputUdfOperator<IN, IN, ReduceOpe
 			return po;
 		}
 		else if (grouper.getKeys() instanceof Keys.ExpressionKeys) {
-			
+
 			// reduce with field positions
 			int[] logicalKeyPositions = grouper.getKeys().computeLogicalKeyPositions();
 			UnaryOperatorInformation<IN, IN> operatorInfo = new UnaryOperatorInformation<>(getInputType(), getInputType());
 			ReduceOperatorBase<IN, ReduceFunction<IN>> po =
 					new ReduceOperatorBase<>(function, operatorInfo, logicalKeyPositions, name);
-			
+
 			po.setCustomPartitioner(grouper.getCustomPartitioner());
-			
+
 			po.setInput(input);
 			po.setParallelism(getParallelism());
 			po.setCombineHint(hint);
-			
+
 			return po;
 		}
 		else {
@@ -164,7 +162,7 @@ public class ReduceOperator<IN> extends SingleInputUdfOperator<IN, IN, ReduceOpe
 	/**
 	 * Sets the strategy to use for the combine phase of the reduce.
 	 *
-	 * If this method is not called, then the default hint will be used.
+	 * <p>If this method is not called, then the default hint will be used.
 	 * ({@link org.apache.flink.api.common.operators.base.ReduceOperatorBase.CombineHint#OPTIMIZER_CHOOSES})
 	 *
 	 * @param strategy The hint to use.
@@ -177,7 +175,7 @@ public class ReduceOperator<IN> extends SingleInputUdfOperator<IN, IN, ReduceOpe
 	}
 
 	// --------------------------------------------------------------------------------------------
-	
+
 	private static <T, K> org.apache.flink.api.common.operators.SingleInputOperator<?, T, ?> translateSelectorFunctionReducer(
 		SelectorFunctionKeys<T, ?> rawKeys,
 		ReduceFunction<T> function,
@@ -185,14 +183,13 @@ public class ReduceOperator<IN> extends SingleInputUdfOperator<IN, IN, ReduceOpe
 		String name,
 		Operator<T> input,
 		int parallelism,
-		CombineHint hint)
-	{
+		CombineHint hint) {
 		@SuppressWarnings("unchecked")
 		final SelectorFunctionKeys<T, K> keys = (SelectorFunctionKeys<T, K>) rawKeys;
-		
+
 		TypeInformation<Tuple2<K, T>> typeInfoWithKey = KeyFunctions.createTypeWithKey(keys);
 		Operator<Tuple2<K, T>> keyedInput = KeyFunctions.appendKeyExtractor(input, keys);
-		
+
 		PlanUnwrappingReduceOperator<T, K> reducer = new PlanUnwrappingReduceOperator<>(function, keys, name, inputType, typeInfoWithKey);
 		reducer.setInput(keyedInput);
 		reducer.setParallelism(parallelism);
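
For context, a minimal sketch of how the hint wired through above is set from user code (illustrative data and class names, assuming the Java DataSet API of this commit's generation):

    import org.apache.flink.api.common.operators.base.ReduceOperatorBase.CombineHint;
    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.tuple.Tuple2;

    public class ReduceHintExample {
        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

            DataSet<Tuple2<String, Integer>> counts = env.fromElements(
                    Tuple2.of("a", 1), Tuple2.of("b", 2), Tuple2.of("a", 3));

            // Grouped reduce: translateToDataFlow takes the field-position
            // branch above and forwards the hint via po.setCombineHint(hint).
            counts.groupBy(0)
                  .reduce((a, b) -> Tuple2.of(a.f0, a.f1 + b.f1))
                  .setCombineHint(CombineHint.HASH)
                  .print();

            // Without groupBy(...), the non-grouped branch applies and the
            // reduce runs with parallelism 1, as enforced above.
        }
    }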

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/SingleInputOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/SingleInputOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/SingleInputOperator.java
index 359f4b8..b718a56 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/SingleInputOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/SingleInputOperator.java
@@ -24,47 +24,46 @@ import org.apache.flink.api.java.DataSet;
 
 /**
  * Base class for operations that operates on a single input data set.
- * 
+ *
  * @param <IN> The data type of the input data set.
  * @param <OUT> The data type of the returned data set.
  */
 @Public
 public abstract class SingleInputOperator<IN, OUT, O extends SingleInputOperator<IN, OUT, O>> extends Operator<OUT, O> {
-	
+
 	private final DataSet<IN> input;
-	
-	
+
 	protected SingleInputOperator(DataSet<IN> input, TypeInformation<OUT> resultType) {
 		super(input.getExecutionEnvironment(), resultType);
 		this.input = input;
 	}
-	
+
 	/**
 	 * Gets the data set that this operation uses as its input.
-	 * 
+	 *
 	 * @return The data set that this operation uses as its input.
 	 */
 	public DataSet<IN> getInput() {
 		return this.input;
 	}
-	
+
 	/**
 	 * Gets the type information of the data type of the input data set.
 	 * This method returns equivalent information as {@code getInput().getType()}.
-	 * 
+	 *
 	 * @return The input data type.
 	 */
 	public TypeInformation<IN> getInputType() {
 		return this.input.getType();
 	}
-	
+
 	/**
 	 * Translates this operation to a data flow operator of the common data flow API.
-	 * 
+	 *
 	 * @param input The data flow operator that produces this operation's input data.
 	 * @return The translated data flow operator.
 	 */
 	protected abstract org.apache.flink.api.common.operators.Operator<OUT> translateToDataFlow(
 			org.apache.flink.api.common.operators.Operator<IN> input);
-	
+
 }
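
To make the contract of translateToDataFlow concrete, a hypothetical subclass (illustrative only, not part of Flink) that forwards its input unchanged and can therefore return the upstream dataflow operator as-is:

    import org.apache.flink.api.java.DataSet;

    // Illustrative no-op operator: IN == OUT, so the input's type information
    // doubles as the result type and no new dataflow node is needed.
    public class IdentityOperator<T> extends SingleInputOperator<T, T, IdentityOperator<T>> {

        public IdentityOperator(DataSet<T> input) {
            super(input, input.getType());
        }

        @Override
        protected org.apache.flink.api.common.operators.Operator<T> translateToDataFlow(
                org.apache.flink.api.common.operators.Operator<T> input) {
            return input;
        }
    }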

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/SingleInputUdfOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/SingleInputUdfOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/SingleInputUdfOperator.java
index eb485fe..4ce44aa 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/SingleInputUdfOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/SingleInputUdfOperator.java
@@ -18,12 +18,6 @@
 
 package org.apache.flink.api.java.operators;
 
-import java.lang.annotation.Annotation;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
-
 import org.apache.flink.annotation.Internal;
 import org.apache.flink.annotation.Public;
 import org.apache.flink.annotation.PublicEvolving;
@@ -39,6 +33,12 @@ import org.apache.flink.api.java.functions.SemanticPropUtil;
 import org.apache.flink.api.java.typeutils.TypeInfoParser;
 import org.apache.flink.configuration.Configuration;
 
+import java.lang.annotation.Annotation;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
 import static java.util.Objects.requireNonNull;
 
 /**
@@ -46,16 +46,15 @@ import static java.util.Objects.requireNonNull;
  * user-defined functions (UDFs). The UDFs encapsulated by this operator are naturally UDFs that
  * have one input (such as {@link org.apache.flink.api.common.functions.RichMapFunction} or
  * {@link org.apache.flink.api.common.functions.RichReduceFunction}).
- * <p>
- * This class encapsulates utilities for the UDFs, such as broadcast variables, parameterization
+ *
+ * <p>This class encapsulates utilities for the UDFs, such as broadcast variables, parameterization
  * through configuration objects, and semantic properties.
  * @param <IN> The data type of the input data set.
  * @param <OUT> The data type of the returned data set.
  */
 @Public
 public abstract class SingleInputUdfOperator<IN, OUT, O extends SingleInputUdfOperator<IN, OUT, O>>
-	extends SingleInputOperator<IN, OUT, O> implements UdfOperator<O>
-{
+	extends SingleInputOperator<IN, OUT, O> implements UdfOperator<O> {
 	private Configuration parameters;
 
 	private Map<String, DataSet<?>> broadcastVariables;
@@ -77,8 +76,7 @@ public abstract class SingleInputUdfOperator<IN, OUT, O extends SingleInputUdfOp
 	protected SingleInputUdfOperator(DataSet<IN> input, TypeInformation<OUT> resultType) {
 		super(input, resultType);
 	}
-	
-	
+
 	protected abstract Function getFunction();
 
 	// --------------------------------------------------------------------------------------------
@@ -102,7 +100,7 @@ public abstract class SingleInputUdfOperator<IN, OUT, O extends SingleInputUdfOp
 		if (name == null) {
 			throw new IllegalArgumentException("Broadcast variable name must not be null.");
 		}
-		
+
 		if (this.broadcastVariables == null) {
 			this.broadcastVariables = new HashMap<String, DataSet<?>>();
 		}
@@ -115,46 +113,34 @@ public abstract class SingleInputUdfOperator<IN, OUT, O extends SingleInputUdfOp
 	}
 
 	/**
-	 * <p>
 	 * Adds semantic information about forwarded fields of the user-defined function.
 	 * The forwarded fields information declares fields which are never modified by the function and
 	 * which are forwarded at the same position to the output or unchanged copied to another position in the output.
-	 * </p>
 	 *
-	 * <p>
-	 * Fields that are forwarded at the same position are specified by their position.
+	 * <p>Fields that are forwarded at the same position are specified by their position.
 	 * The specified position must be valid for the input and output data type and have the same type.
 	 * For example <code>withForwardedFields("f2")</code> declares that the third field of a Java input tuple is
 	 * copied to the third field of an output tuple.
-	 * </p>
 	 *
-	 * <p>
-	 * Fields which are unchanged copied to another position in the output are declared by specifying the
+	 * <p>Fields which are unchanged copied to another position in the output are declared by specifying the
 	 * source field reference in the input and the target field reference in the output.
 	 * {@code withForwardedFields("f0->f2")} denotes that the first field of the Java input tuple is
 	 * unchanged copied to the third field of the Java output tuple. When using a wildcard ("*") ensure that
 	 * the number of declared fields and their types in input and output type match.
-	 * </p>
 	 *
-	 * <p>
-	 * Multiple forwarded fields can be annotated in one ({@code withForwardedFields("f2; f3->f0; f4")})
+	 * <p>Multiple forwarded fields can be annotated in one ({@code withForwardedFields("f2; f3->f0; f4")})
 	 * or separate Strings ({@code withForwardedFields("f2", "f3->f0", "f4")}).
 	 * Please refer to the JavaDoc of {@link org.apache.flink.api.common.functions.Function} or Flink's documentation for
 	 * details on field references such as nested fields and wildcard.
-	 * </p>
 	 *
-	 * <p>
-	 * It is not possible to override existing semantic information about forwarded fields which was
+	 * <p>It is not possible to override existing semantic information about forwarded fields which was
 	 * for example added by a {@link org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFields} class annotation.
-	 * </p>
 	 *
-	 * <p>
-	 * <b>NOTE: Adding semantic information for functions is optional!
+	 * <p><b>NOTE: Adding semantic information for functions is optional!
 	 * If used correctly, semantic information can help the Flink optimizer to generate more efficient execution plans.
 	 * However, incorrect semantic information can cause the optimizer to generate incorrect execution plans which compute wrong results!
 	 * So be careful when adding semantic information.
 	 * </b>
-	 * </p>
 	 *
 	 * @param forwardedFields A list of field forward expressions.
 	 * @return This operator with annotated forwarded field information.
@@ -164,17 +150,17 @@ public abstract class SingleInputUdfOperator<IN, OUT, O extends SingleInputUdfOp
 	 */
 	public O withForwardedFields(String... forwardedFields) {
 
-		if(this.udfSemantics == null) {
+		if (this.udfSemantics == null) {
 			// extract semantic properties from function annotations
 			setSemanticProperties(extractSemanticAnnotations(getFunction().getClass()));
 		}
 
-		if(this.udfSemantics == null
+		if (this.udfSemantics == null
 				|| this.analyzedUdfSemantics) { // discard analyzed semantic properties
 			setSemanticProperties(new SingleInputSemanticProperties());
 			SemanticPropUtil.getSemanticPropsSingleFromString(this.udfSemantics, forwardedFields, null, null, this.getInputType(), this.getResultType());
 		} else {
-			if(udfWithForwardedFieldsAnnotation(getFunction().getClass())) {
+			if (udfWithForwardedFieldsAnnotation(getFunction().getClass())) {
 				// refuse semantic information as it would override the function annotation
 				throw new SemanticProperties.InvalidSemanticAnnotationException("Forwarded field information " +
 						"has already been added by a function annotation for this operator. " +
@@ -202,7 +188,7 @@ public abstract class SingleInputUdfOperator<IN, OUT, O extends SingleInputUdfOp
 	 * <p>Classes can be used as type hints for non-generic types (classes without generic parameters),
 	 * but not for generic types like for example Tuples. For those generic types, please
 	 * use the {@link #returns(TypeHint)} method.
-	 * 
+	 *
 	 * <p>Use this method the following way:
 	 * <pre>{@code
 	 *     DataSet<String[]> result =
@@ -215,7 +201,7 @@ public abstract class SingleInputUdfOperator<IN, OUT, O extends SingleInputUdfOp
 	 */
 	public O returns(Class<OUT> typeClass) {
 		requireNonNull(typeClass, "type class must not be null");
-		
+
 		try {
 			return returns(TypeInformation.of(typeClass));
 		}
@@ -225,7 +211,7 @@ public abstract class SingleInputUdfOperator<IN, OUT, O extends SingleInputUdfOp
 					"please use the 'returns(TypeHint)' method instead.", e);
 		}
 	}
-	
+
 	/**
 	 * Adds a type information hint about the return type of this operator. This method
 	 * can be used in cases where Flink cannot determine automatically what the produced
@@ -244,7 +230,7 @@ public abstract class SingleInputUdfOperator<IN, OUT, O extends SingleInputUdfOp
 	 */
 	public O returns(TypeHint<OUT> typeHint) {
 		requireNonNull(typeHint, "TypeHint must not be null");
-	
+
 		try {
 			return returns(TypeInformation.of(typeHint));
 		}
@@ -259,7 +245,7 @@ public abstract class SingleInputUdfOperator<IN, OUT, O extends SingleInputUdfOp
 	 * can be used in cases where Flink cannot determine automatically what the produced
 	 * type of a function is. That can be the case if the function uses generic type variables
 	 * in the return type that cannot be inferred from the input type.
-	 * 
+	 *
 	 * <p>In most cases, the methods {@link #returns(Class)} and {@link #returns(TypeHint)}
 	 * are preferable.
 	 *
@@ -268,22 +254,22 @@ public abstract class SingleInputUdfOperator<IN, OUT, O extends SingleInputUdfOp
 	 */
 	public O returns(TypeInformation<OUT> typeInfo) {
 		requireNonNull(typeInfo, "TypeInformation must not be null");
-		
+
 		fillInType(typeInfo);
 		@SuppressWarnings("unchecked")
 		O returnType = (O) this;
 		return returnType;
 	}
-	
+
 	/**
-	 * Adds a type information hint about the return type of this operator. 
-	 * 
-	 * <p>
-	 * Type hints are important in cases where the Java compiler
+	 * Adds a type information hint about the return type of this operator.
+	 *
+	 *
+	 * <p>Type hints are important in cases where the Java compiler
 	 * throws away generic type information necessary for efficient execution.
-	 * 
-	 * <p>
-	 * This method takes a type information string that will be parsed. A type information string can contain the following
+	 *
+	 *
+	 * <p>This method takes a type information string that will be parsed. A type information string can contain the following
 	 * types:
 	 *
 	 * <ul>
@@ -303,13 +289,13 @@ public abstract class SingleInputUdfOperator<IN, OUT, O extends SingleInputUdfOp
 	 * <li>Enum types such as <code>Enum&lt;org.my.CustomEnum&gt;</code></li>
 	 * </ul>
 	 *
-	 * Example:
+	 * <p>Example:
 	 * <code>"Tuple2&lt;String,Tuple2&lt;Integer,org.my.MyJob$Pojo&lt;word=String&gt;&gt;&gt;"</code>
 	 *
 	 * @param typeInfoString
 	 *            type information string to be parsed
 	 * @return This operator with a given return type hint.
-	 * 
+	 *
 	 * @deprecated Please use {@link #returns(Class)} or {@link #returns(TypeHint)} instead.
 	 */
 	@Deprecated
@@ -320,7 +306,7 @@ public abstract class SingleInputUdfOperator<IN, OUT, O extends SingleInputUdfOp
 		}
 		return returns(TypeInfoParser.<OUT>parse(typeInfoString));
 	}
-	
+
 	// --------------------------------------------------------------------------------------------
 	// Accessors
 	// --------------------------------------------------------------------------------------------
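
A minimal sketch of the two facilities documented above, returns(...) and withForwardedFields(...), as seen from user code (illustrative data):

    import org.apache.flink.api.common.typeinfo.TypeHint;
    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.tuple.Tuple2;

    public class TypeHintExample {
        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

            DataSet<Tuple2<String, Integer>> input = env.fromElements(
                    Tuple2.of("a", 1), Tuple2.of("b", 2));

            input.map(t -> Tuple2.of(t.f0, t.f1 + 1))
                 // The lambda's generic return type is erased, so the produced
                 // type is declared explicitly with a TypeHint.
                 .returns(new TypeHint<Tuple2<String, Integer>>() {})
                 // f0 is copied through unmodified; f1 is changed and is
                 // therefore not declared as forwarded.
                 .withForwardedFields("f0")
                 .print();
        }
    }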

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/SortPartitionOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/SortPartitionOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/SortPartitionOperator.java
index 7f30a30..862ad89 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/SortPartitionOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/SortPartitionOperator.java
@@ -58,7 +58,6 @@ public class SortPartitionOperator<T> extends SingleInputOperator<T, T, SortPart
 		this.sortLocationName = sortLocationName;
 	}
 
-
 	public SortPartitionOperator(DataSet<T> dataSet, int sortField, Order sortOrder, String sortLocationName) {
 		this(dataSet, sortLocationName);
 		this.useKeySelector = false;
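
For context, a short usage sketch (illustrative data): field-position calls can be chained, each appending a further sort key, while the useKeySelector flag set above marks the key-selector variant, which cannot be mixed with position-based calls.

    import org.apache.flink.api.common.operators.Order;
    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.tuple.Tuple2;

    public class SortPartitionExample {
        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

            DataSet<Tuple2<String, Integer>> data = env.fromElements(
                    Tuple2.of("b", 2), Tuple2.of("a", 1), Tuple2.of("a", 3));

            // Sorts records locally within each partition; no global order.
            data.sortPartition(0, Order.ASCENDING)
                .sortPartition(1, Order.DESCENDING)
                .print();
        }
    }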

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/SortedGrouping.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/SortedGrouping.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/SortedGrouping.java
index 11645df..2e3709d 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/SortedGrouping.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/SortedGrouping.java
@@ -19,40 +19,40 @@
 package org.apache.flink.api.java.operators;
 
 import org.apache.flink.annotation.Public;
+import org.apache.flink.api.common.InvalidProgramException;
 import org.apache.flink.api.common.functions.GroupCombineFunction;
+import org.apache.flink.api.common.functions.GroupReduceFunction;
+import org.apache.flink.api.common.functions.Partitioner;
 import org.apache.flink.api.common.operators.Keys;
+import org.apache.flink.api.common.operators.Keys.ExpressionKeys;
+import org.apache.flink.api.common.operators.Order;
 import org.apache.flink.api.common.operators.Ordering;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.Utils;
 import org.apache.flink.api.java.functions.FirstReducer;
-
-import java.util.Arrays;
-
-import org.apache.flink.api.common.InvalidProgramException;
-import org.apache.flink.api.common.functions.GroupReduceFunction;
-import org.apache.flink.api.common.functions.Partitioner;
-import org.apache.flink.api.common.operators.Order;
-import org.apache.flink.api.java.DataSet;
-import org.apache.flink.api.common.operators.Keys.ExpressionKeys;
 import org.apache.flink.api.java.typeutils.TypeExtractor;
 import org.apache.flink.util.Preconditions;
 
+import java.util.Arrays;
+
 /**
- * SortedGrouping is an intermediate step for a transformation on a grouped and sorted DataSet.<br>
- * The following transformation can be applied on sorted groups:
+ * SortedGrouping is an intermediate step for a transformation on a grouped and sorted DataSet.
+ *
+ * <p>The following transformation can be applied on sorted groups:
  * <ul>
  * 	<li>{@link SortedGrouping#reduceGroup(org.apache.flink.api.common.functions.GroupReduceFunction)},</li>
  * </ul>
- * 
+ *
  * @param <T> The type of the elements of the sorted and grouped DataSet.
  */
 @Public
 public class SortedGrouping<T> extends Grouping<T> {
-	
+
 	private int[] groupSortKeyPositions;
 	private Order[] groupSortOrders;
 	private Keys.SelectorFunctionKeys<T, ?> groupSortSelectorFunctionKey = null;
-	
+
 	/*
 	 * int sorting keys for tuples
 	 */
@@ -70,7 +70,7 @@ public class SortedGrouping<T> extends Grouping<T> {
 		this.groupSortOrders = new Order[groupSortKeyPositions.length];
 		Arrays.fill(this.groupSortOrders, order);
 	}
-	
+
 	/*
 	 * String sorting for Pojos and tuples
 	 */
@@ -99,8 +99,8 @@ public class SortedGrouping<T> extends Grouping<T> {
 			throw new InvalidProgramException("Sorting on KeySelector keys only works with KeySelector grouping.");
 		}
 		TypeInformation<?> sortKeyType = keySelector.getKeyType();
-		if(!sortKeyType.isSortKeyType()) {
-			throw new InvalidProgramException("Key type " + sortKeyType +" is not sortable.");
+		if (!sortKeyType.isSortKeyType()) {
+			throw new InvalidProgramException("Key type " + sortKeyType + " is not sortable.");
 		}
 
 		this.groupSortKeyPositions = keySelector.computeLogicalKeyPositions();
@@ -112,13 +112,13 @@ public class SortedGrouping<T> extends Grouping<T> {
 		this.groupSortOrders = new Order[groupSortKeyPositions.length];
 		Arrays.fill(this.groupSortOrders, order);
 	}
-	
+
 	// --------------------------------------------------------------------------------------------
-	
+
 	protected int[] getGroupSortKeyPositions() {
 		return this.groupSortKeyPositions;
 	}
-	
+
 	protected Order[] getGroupSortOrders() {
 		return this.groupSortOrders;
 	}
@@ -126,24 +126,24 @@ public class SortedGrouping<T> extends Grouping<T> {
 	protected Ordering getGroupOrdering() {
 
 		Ordering o = new Ordering();
-		for(int i=0; i < this.groupSortKeyPositions.length; i++) {
+		for (int i = 0; i < this.groupSortKeyPositions.length; i++) {
 			o.appendOrdering(this.groupSortKeyPositions[i], null, this.groupSortOrders[i]);
 		}
 
 		return o;
 	}
-	
+
 	/**
 	 * Uses a custom partitioner for the grouping.
-	 * 
+	 *
 	 * @param partitioner The custom partitioner.
 	 * @return The grouping object itself, to allow for method chaining.
 	 */
 	public SortedGrouping<T> withPartitioner(Partitioner<?> partitioner) {
 		Preconditions.checkNotNull(partitioner);
-		
+
 		getKeys().validateCustomPartitioner(partitioner, null);
-		
+
 		this.customPartitioner = partitioner;
 		return this;
 	}
@@ -153,14 +153,15 @@ public class SortedGrouping<T> extends Grouping<T> {
 	}
 
 	/**
-	 * Applies a GroupReduce transformation on a grouped and sorted {@link DataSet}.<br>
-	 * The transformation calls a {@link org.apache.flink.api.common.functions.RichGroupReduceFunction} for each group of the DataSet.
+	 * Applies a GroupReduce transformation on a grouped and sorted {@link DataSet}.
+	 *
+	 * <p>The transformation calls a {@link org.apache.flink.api.common.functions.RichGroupReduceFunction} for each group of the DataSet.
 	 * A GroupReduceFunction can iterate over all elements of a group and emit any
 	 *   number of output elements including none.
-	 * 
+	 *
 	 * @param reducer The GroupReduceFunction that is applied on each group of the DataSet.
 	 * @return A GroupReduceOperator that represents the reduced DataSet.
-	 * 
+	 *
 	 * @see org.apache.flink.api.common.functions.RichGroupReduceFunction
 	 * @see GroupReduceOperator
 	 * @see DataSet
@@ -196,33 +197,34 @@ public class SortedGrouping<T> extends Grouping<T> {
 		return new GroupCombineOperator<>(this, resultType, inputDataSet.clean(combiner), Utils.getCallLocationName());
 	}
 
-	
 	/**
-	 * Returns a new set containing the first n elements in this grouped and sorted {@link DataSet}.<br>
+	 * Returns a new set containing the first n elements in this grouped and sorted {@link DataSet}.
 	 * @param n The desired number of elements for each group.
 	 * @return A GroupReduceOperator that represents the DataSet containing the elements.
 	*/
 	public GroupReduceOperator<T, T> first(int n) {
-		if(n < 1) {
+		if (n < 1) {
 			throw new InvalidProgramException("Parameter n of first(n) must be at least 1.");
 		}
-		
+
 		return reduceGroup(new FirstReducer<T>(n));
 	}
-	
+
 	// --------------------------------------------------------------------------------------------
 	//  Group Operations
 	// --------------------------------------------------------------------------------------------
-	
+
 	/**
-	 * Sorts {@link org.apache.flink.api.java.tuple.Tuple} elements within a group on the specified field in the specified {@link Order}.<br>
-	 * <b>Note: Only groups of Tuple or Pojo elements can be sorted.</b><br>
-	 * Groups can be sorted by multiple fields by chaining {@link #sortGroup(int, Order)} calls.
-	 * 
+	 * Sorts {@link org.apache.flink.api.java.tuple.Tuple} elements within a group on the specified field in the specified {@link Order}.
+	 *
+	 * <p><b>Note: Only groups of Tuple or Pojo elements can be sorted.</b>
+	 *
+	 * <p>Groups can be sorted by multiple fields by chaining {@link #sortGroup(int, Order)} calls.
+	 *
 	 * @param field The Tuple field on which the group is sorted.
 	 * @param order The Order in which the specified Tuple field is sorted.
 	 * @return A SortedGrouping with specified order of group element.
-	 * 
+	 *
 	 * @see org.apache.flink.api.java.tuple.Tuple
 	 * @see Order
 	 */
@@ -241,9 +243,11 @@ public class SortedGrouping<T> extends Grouping<T> {
 	}
 
 	/**
-	 * Sorts {@link org.apache.flink.api.java.tuple.Tuple} or POJO elements within a group on the specified field in the specified {@link Order}.<br>
-	 * <b>Note: Only groups of Tuple or Pojo elements can be sorted.</b><br>
-	 * Groups can be sorted by multiple fields by chaining {@link #sortGroup(String, Order)} calls.
+	 * Sorts {@link org.apache.flink.api.java.tuple.Tuple} or POJO elements within a group on the specified field in the specified {@link Order}.
+	 *
+	 * <p><b>Note: Only groups of Tuple or Pojo elements can be sorted.</b>
+	 *
+	 * <p>Groups can be sorted by multiple fields by chaining {@link #sortGroup(String, Order)} calls.
 	 *
 	 * @param field The Tuple or Pojo field on which the group is sorted.
 	 * @param order The Order in which the specified field is sorted.
@@ -265,17 +269,17 @@ public class SortedGrouping<T> extends Grouping<T> {
 		addSortGroupInternal(ek, order);
 		return this;
 	}
-	
+
 	private void addSortGroupInternal(ExpressionKeys<T> ek, Order order) {
 		Preconditions.checkArgument(order != null, "Order can not be null");
 		int[] additionalKeyPositions = ek.computeLogicalKeyPositions();
-		
+
 		int newLength = this.groupSortKeyPositions.length + additionalKeyPositions.length;
 		this.groupSortKeyPositions = Arrays.copyOf(this.groupSortKeyPositions, newLength);
 		this.groupSortOrders = Arrays.copyOf(this.groupSortOrders, newLength);
 		int pos = newLength - additionalKeyPositions.length;
 		int off = newLength - additionalKeyPositions.length;
-		for(;pos < newLength; pos++) {
+		for (; pos < newLength; pos++) {
 			this.groupSortKeyPositions[pos] = additionalKeyPositions[pos - off];
 			this.groupSortOrders[pos] = order; // use the same order
 		}
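
A minimal sketch tying the pieces of SortedGrouping together (illustrative data): sortGroup(...) on a grouping yields this class, chained calls append secondary sort keys via addSortGroupInternal, and first(n) is the FirstReducer shortcut shown above.

    import org.apache.flink.api.common.operators.Order;
    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.tuple.Tuple3;

    public class SortedGroupingExample {
        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

            DataSet<Tuple3<String, Integer, Double>> data = env.fromElements(
                    Tuple3.of("a", 2, 0.5), Tuple3.of("a", 1, 1.5), Tuple3.of("b", 3, 2.5));

            // Group on field 0, sort each group on field 1 then field 2, and
            // emit the first two elements of every group.
            data.groupBy(0)
                .sortGroup(1, Order.ASCENDING)
                .sortGroup(2, Order.DESCENDING)
                .first(2)
                .print();
        }
    }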

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/TwoInputOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/TwoInputOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/TwoInputOperator.java
index 28dec32..b915849 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/TwoInputOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/TwoInputOperator.java
@@ -25,18 +25,17 @@ import org.apache.flink.util.Preconditions;
 
 /**
  * Base class for operations that operates on two input data sets.
- * 
+ *
  * @param <IN1> The data type of the first input data set.
  * @param <IN2> The data type of the second input data set.
  * @param <OUT> The data type of the returned data set.
  */
 @Public
 public abstract class TwoInputOperator<IN1, IN2, OUT, O extends TwoInputOperator<IN1, IN2, OUT, O>> extends Operator<OUT, O> {
-	
+
 	private final DataSet<IN1> input1;
 	private final DataSet<IN2> input2;
-	
-	
+
 	protected TwoInputOperator(DataSet<IN1> input1, DataSet<IN2> input2, TypeInformation<OUT> resultType) {
 		super(Preconditions.checkNotNull(input1, "input1 is null").getExecutionEnvironment(), resultType);
 		Preconditions.checkNotNull(input2, "input2 is null");
@@ -44,48 +43,48 @@ public abstract class TwoInputOperator<IN1, IN2, OUT, O extends TwoInputOperator
 		this.input1 = input1;
 		this.input2 = input2;
 	}
-	
+
 	/**
 	 * Gets the data set that this operation uses as its first input.
-	 * 
+	 *
 	 * @return The data set that this operation uses as its first input.
 	 */
 	public DataSet<IN1> getInput1() {
 		return this.input1;
 	}
-	
+
 	/**
 	 * Gets the data set that this operation uses as its second input.
-	 * 
+	 *
 	 * @return The data set that this operation uses as its second input.
 	 */
 	public DataSet<IN2> getInput2() {
 		return this.input2;
 	}
-	
+
 	/**
 	 * Gets the type information of the data type of the first input data set.
 	 * This method returns equivalent information as {@code getInput1().getType()}.
-	 * 
+	 *
 	 * @return The first input data type.
 	 */
 	public TypeInformation<IN1> getInput1Type() {
 		return this.input1.getType();
 	}
-	
+
 	/**
 	 * Gets the type information of the data type of the second input data set.
 	 * This method returns equivalent information as {@code getInput2().getType()}.
-	 * 
+	 *
 	 * @return The second input data type.
 	 */
 	public TypeInformation<IN2> getInput2Type() {
 		return this.input2.getType();
 	}
-	
+
 	/**
 	 * Translates this java API operator into a common API operator with two inputs.
-	 * 
+	 *
 	 * @param input1 The first input of the operation, as a common API operator.
 	 * @param input2 The second input of the operation, as a common API operator.
 	 * @return The created common API operator.
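
Concrete descendants of this base class include the join, coGroup, cross, and union operators. A minimal illustration via cross (illustrative data):

    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;

    public class TwoInputExample {
        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

            DataSet<Integer> numbers = env.fromElements(1, 2);
            DataSet<String> letters = env.fromElements("a", "b");

            // The resulting operator is a TwoInputOperator whose
            // getInput1Type() and getInput2Type() report Integer and String.
            numbers.cross(letters).print();
        }
    }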

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/TwoInputUdfOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/TwoInputUdfOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/TwoInputUdfOperator.java
index 695ed3a..b78d17e 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/TwoInputUdfOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/TwoInputUdfOperator.java
@@ -18,12 +18,6 @@
 
 package org.apache.flink.api.java.operators;
 
-import java.lang.annotation.Annotation;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
-
 import org.apache.flink.annotation.Internal;
 import org.apache.flink.annotation.Public;
 import org.apache.flink.annotation.PublicEvolving;
@@ -39,6 +33,12 @@ import org.apache.flink.api.java.functions.SemanticPropUtil;
 import org.apache.flink.api.java.typeutils.TypeInfoParser;
 import org.apache.flink.configuration.Configuration;
 
+import java.lang.annotation.Annotation;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
 import static java.util.Objects.requireNonNull;
 
 /**
@@ -46,8 +46,8 @@ import static java.util.Objects.requireNonNull;
  * user-defined functions (UDFs). The UDFs encapsulated by this operator are naturally UDFs that
  * have two inputs (such as {@link org.apache.flink.api.common.functions.RichJoinFunction} or
  * {@link org.apache.flink.api.common.functions.RichCoGroupFunction}).
- * <p>
- * This class encapsulates utilities for the UDFs, such as broadcast variables, parameterization
+ *
+ * <p>This class encapsulates utilities for the UDFs, such as broadcast variables, parameterization
  * through configuration objects, and semantic properties.
  *
  * @param <IN1> The data type of the first input data set.
@@ -56,8 +56,7 @@ import static java.util.Objects.requireNonNull;
  */
 @Public
 public abstract class TwoInputUdfOperator<IN1, IN2, OUT, O extends TwoInputUdfOperator<IN1, IN2, OUT, O>>
-	extends TwoInputOperator<IN1, IN2, OUT, O> implements UdfOperator<O>
-{
+	extends TwoInputOperator<IN1, IN2, OUT, O> implements UdfOperator<O> {
 	private Configuration parameters;
 
 	private Map<String, DataSet<?>> broadcastVariables;
@@ -104,7 +103,7 @@ public abstract class TwoInputUdfOperator<IN1, IN2, OUT, O extends TwoInputUdfOp
 		if (name == null) {
 			throw new IllegalArgumentException("Broadcast variable name must not be null.");
 		}
-		
+
 		if (this.broadcastVariables == null) {
 			this.broadcastVariables = new HashMap<String, DataSet<?>>();
 		}
@@ -117,46 +116,34 @@ public abstract class TwoInputUdfOperator<IN1, IN2, OUT, O extends TwoInputUdfOp
 	}
 
 	/**
-	 * <p>
 	 * Adds semantic information about forwarded fields of the first input of the user-defined function.
 	 * The forwarded fields information declares fields which are never modified by the function and
 	 * which are forwarded at the same position to the output or unchanged copied to another position in the output.
-	 * </p>
 	 *
-	 * <p>
-	 * Fields that are forwarded at the same position are specified by their position.
+	 * <p>Fields that are forwarded at the same position are specified by their position.
 	 * The specified position must be valid for the input and output data type and have the same type.
 	 * For example <code>withForwardedFieldsFirst("f2")</code> declares that the third field of a Java input tuple
 	 * from the first input is copied to the third field of an output tuple.
-	 * </p>
 	 *
-	 * <p>
-	 * Fields which are unchanged copied from the first input to another position in the output are declared
+	 * <p>Fields which are unchanged copied from the first input to another position in the output are declared
 	 * by specifying the source field reference in the first input and the target field reference in the output.
 	 * {@code withForwardedFieldsFirst("f0->f2")} denotes that the first field of the first input Java tuple is
 	 * unchanged copied to the third field of the Java output tuple. When using a wildcard ("*") ensure that
 	 * the number of declared fields and their types in first input and output type match.
-	 * </p>
 	 *
-	 * <p>
-	 * Multiple forwarded fields can be annotated in one ({@code withForwardedFieldsFirst("f2; f3->f0; f4")})
+	 * <p>Multiple forwarded fields can be annotated in one ({@code withForwardedFieldsFirst("f2; f3->f0; f4")})
 	 * or separate Strings ({@code withForwardedFieldsFirst("f2", "f3->f0", "f4")}).
 	 * Please refer to the JavaDoc of {@link org.apache.flink.api.common.functions.Function} or Flink's documentation for
 	 * details on field references such as nested fields and wildcard.
-	 * </p>
 	 *
-	 * <p>
-	 * It is not possible to override existing semantic information about forwarded fields of the first input which was
+	 * <p>It is not possible to override existing semantic information about forwarded fields of the first input which was
 	 * for example added by a {@link org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFieldsFirst} class annotation.
-	 * </p>
 	 *
-	 * <p>
-	 * <b>NOTE: Adding semantic information for functions is optional!
+	 * <p><b>NOTE: Adding semantic information for functions is optional!
 	 * If used correctly, semantic information can help the Flink optimizer to generate more efficient execution plans.
 	 * However, incorrect semantic information can cause the optimizer to generate incorrect execution plans which compute wrong results!
 	 * So be careful when adding semantic information.
 	 * </b>
-	 * </p>
 	 *
 	 * @param forwardedFieldsFirst A list of forwarded field expressions for the first input of the function.
 	 * @return This operator with annotated forwarded field information.
@@ -171,12 +158,12 @@ public abstract class TwoInputUdfOperator<IN1, IN2, OUT, O extends TwoInputUdfOp
 			setSemanticProperties(extractSemanticAnnotationsFromUdf(getFunction().getClass()));
 		}
 
-		if(this.udfSemantics == null || this.analyzedUdfSemantics) {
+		if (this.udfSemantics == null || this.analyzedUdfSemantics) {
 			setSemanticProperties(new DualInputSemanticProperties());
 			SemanticPropUtil.getSemanticPropsDualFromString(this.udfSemantics, forwardedFieldsFirst, null,
 					null, null, null, null, getInput1Type(), getInput2Type(), getResultType());
 		} else {
-			if(this.udfWithForwardedFieldsFirstAnnotation(getFunction().getClass())) {
+			if (this.udfWithForwardedFieldsFirstAnnotation(getFunction().getClass())) {
 				// refuse semantic information as it would override the function annotation
 				throw new SemanticProperties.InvalidSemanticAnnotationException("Forwarded field information " +
 						"has already been added by a function annotation for the first input of this operator. " +
@@ -192,46 +179,34 @@ public abstract class TwoInputUdfOperator<IN1, IN2, OUT, O extends TwoInputUdfOp
 	}
 
 	/**
-	 * <p>
 	 * Adds semantic information about forwarded fields of the second input of the user-defined function.
 	 * The forwarded fields information declares fields which are never modified by the function and
 	 * which are forwarded at the same position to the output or unchanged copied to another position in the output.
-	 * </p>
 	 *
-	 * <p>
-	 * Fields that are forwarded at the same position are specified by their position.
+	 * <p>Fields that are forwarded at the same position are specified by their position.
 	 * The specified position must be valid for the input and output data type and have the same type.
 	 * For example <code>withForwardedFieldsSecond("f2")</code> declares that the third field of a Java input tuple
 	 * from the second input is copied to the third field of an output tuple.
-	 * </p>
 	 *
-	 * <p>
-	 * Fields which are unchanged copied from the second input to another position in the output are declared
+	 * <p>Fields which are unchanged copied from the second input to another position in the output are declared
 	 * by specifying the source field reference in the second input and the target field reference in the output.
 	 * {@code withForwardedFieldsSecond("f0->f2")} denotes that the first field of the second input Java tuple is
 	 * unchanged copied to the third field of the Java output tuple. When using a wildcard ("*") ensure that
 	 * the number of declared fields and their types in second input and output type match.
-	 * </p>
 	 *
-	 * <p>
-	 * Multiple forwarded fields can be annotated in one ({@code withForwardedFieldsSecond("f2; f3->f0; f4")})
+	 * <p>Multiple forwarded fields can be annotated in one ({@code withForwardedFieldsSecond("f2; f3->f0; f4")})
 	 * or separate Strings ({@code withForwardedFieldsSecond("f2", "f3->f0", "f4")}).
 	 * Please refer to the JavaDoc of {@link org.apache.flink.api.common.functions.Function} or Flink's documentation for
 	 * details on field references such as nested fields and wildcard.
-	 * </p>
 	 *
-	 * <p>
-	 * It is not possible to override existing semantic information about forwarded fields of the second input which was
+	 * <p>It is not possible to override existing semantic information about forwarded fields of the second input which was
 	 * for example added by a {@link org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFieldsSecond} class annotation.
-	 * </p>
 	 *
-	 * <p>
-	 * <b>NOTE: Adding semantic information for functions is optional!
+	 * <p><b>NOTE: Adding semantic information for functions is optional!
 	 * If used correctly, semantic information can help the Flink optimizer to generate more efficient execution plans.
 	 * However, incorrect semantic information can cause the optimizer to generate incorrect execution plans which compute wrong results!
 	 * So be careful when adding semantic information.
 	 * </b>
-	 * </p>
 	 *
 	 * @param forwardedFieldsSecond A list of forwarded field expressions for the second input of the function.
 	 * @return This operator with annotated forwarded field information.
@@ -246,12 +221,12 @@ public abstract class TwoInputUdfOperator<IN1, IN2, OUT, O extends TwoInputUdfOp
 			setSemanticProperties(extractSemanticAnnotationsFromUdf(getFunction().getClass()));
 		}
 
-		if(this.udfSemantics == null || this.analyzedUdfSemantics) {
+		if (this.udfSemantics == null || this.analyzedUdfSemantics) {
 			setSemanticProperties(new DualInputSemanticProperties());
 			SemanticPropUtil.getSemanticPropsDualFromString(this.udfSemantics, null, forwardedFieldsSecond,
 					null, null, null, null, getInput1Type(), getInput2Type(), getResultType());
 		} else {
-			if(udfWithForwardedFieldsSecondAnnotation(getFunction().getClass())) {
+			if (udfWithForwardedFieldsSecondAnnotation(getFunction().getClass())) {
 				// refuse semantic information as it would override the function annotation
 				throw new SemanticProperties.InvalidSemanticAnnotationException("Forwarded field information " +
 						"has already been added by a function annotation for the second input of this operator. " +
@@ -282,7 +257,7 @@ public abstract class TwoInputUdfOperator<IN1, IN2, OUT, O extends TwoInputUdfOp
 	 *
 	 * <p>Use this method the following way:
 	 * <pre>{@code
-	 *     DataSet<String[]> result = 
+	 *     DataSet<String[]> result =
 	 *         data1.join(data2).where("id").equalTo("fieldX")
 	 *              .with(new JoinFunctionWithNonInferrableReturnType())
 	 *              .returns(String[].class);
@@ -312,7 +287,7 @@ public abstract class TwoInputUdfOperator<IN1, IN2, OUT, O extends TwoInputUdfOp
 	 *
 	 * <p>Use this method the following way:
 	 * <pre>{@code
-	 *     DataSet<Tuple2<String, Double>> result = 
+	 *     DataSet<Tuple2<String, Double>> result =
 	 *         data1.join(data2).where("id").equalTo("fieldX")
 	 *              .with(new JoinFunctionWithNonInferrableReturnType())
 	 *              .returns(new TypeHint<Tuple2<String, Double>>(){});
@@ -355,14 +330,14 @@ public abstract class TwoInputUdfOperator<IN1, IN2, OUT, O extends TwoInputUdfOp
 	}
 
 	/**
-	 * Adds a type information hint about the return type of this operator. 
+	 * Adds a type information hint about the return type of this operator.
 	 *
-	 * <p>
-	 * Type hints are important in cases where the Java compiler
+	 *
+	 * <p>Type hints are important in cases where the Java compiler
 	 * throws away generic type information necessary for efficient execution.
 	 *
-	 * <p>
-	 * This method takes a type information string that will be parsed. A type information string can contain the following
+	 *
+	 * <p>This method takes a type information string that will be parsed. A type information string can contain the following
 	 * types:
 	 *
 	 * <ul>
@@ -382,7 +357,7 @@ public abstract class TwoInputUdfOperator<IN1, IN2, OUT, O extends TwoInputUdfOp
 	 * <li>Enum types such as <code>Enum&lt;org.my.CustomEnum&gt;</code></li>
 	 * </ul>
 	 *
-	 * Example:
+	 * <p>Example:
 	 * <code>"Tuple2&lt;String,Tuple2&lt;Integer,org.my.MyJob$Pojo&lt;word=String&gt;&gt;&gt;"</code>
 	 *
 	 * @param typeInfoString
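
A minimal sketch of the forwarded-fields annotations documented above, applied to a join (illustrative data; the lambda needs the returns(...) hint because of type erasure):

    import org.apache.flink.api.common.typeinfo.TypeHint;
    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.tuple.Tuple2;

    public class ForwardedFieldsJoinExample {
        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

            DataSet<Tuple2<Long, String>> names = env.fromElements(Tuple2.of(1L, "x"));
            DataSet<Tuple2<Long, Double>> scores = env.fromElements(Tuple2.of(1L, 0.9));

            names.join(scores).where(0).equalTo(0)
                 .with((name, score) -> Tuple2.of(name.f1, score.f1))
                 .returns(new TypeHint<Tuple2<String, Double>>() {})
                 // f1 of the first input moves to output position f0; f1 of
                 // the second input keeps its position.
                 .withForwardedFieldsFirst("f1->f0")
                 .withForwardedFieldsSecond("f1")
                 .print();
        }
    }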

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/UdfOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/UdfOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/UdfOperator.java
index e4f7888..249a5cb 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/UdfOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/UdfOperator.java
@@ -18,14 +18,13 @@
 
 package org.apache.flink.api.java.operators;
 
-import java.util.Map;
-
 import org.apache.flink.annotation.Internal;
 import org.apache.flink.annotation.Public;
 import org.apache.flink.api.common.operators.SemanticProperties;
+import org.apache.flink.api.java.DataSet;
 import org.apache.flink.configuration.Configuration;
 
-import org.apache.flink.api.java.DataSet;
+import java.util.Map;
 
 /**
  * This interface marks operators as operators that execute user-defined functions (UDFs), such as
@@ -35,60 +34,60 @@ import org.apache.flink.api.java.DataSet;
  */
 @Public
 public interface UdfOperator<O extends UdfOperator<O>> {
-	
+
 	// --------------------------------------------------------------------------------------------
 	// Accessors
 	// --------------------------------------------------------------------------------------------
-	
+
 	/**
 	 * Gets the configuration parameters that will be passed to the UDF's open method
 	 * {@link org.apache.flink.api.common.functions.AbstractRichFunction#open(Configuration)}.
 	 * The configuration is set via the {@link #withParameters(Configuration)}
 	 * method.
-	 * 
+	 *
 	 * @return The configuration parameters for the UDF.
 	 */
 	Configuration getParameters();
-	
+
 	/**
 	 * Gets the broadcast sets (name and data set) that have been added to context of the UDF.
 	 * Broadcast sets are added to a UDF via the method {@link #withBroadcastSet(DataSet, String)}.
-	 * 
+	 *
 	 * @return The broadcast data sets that have been added to this UDF.
 	 */
 	@Internal
 	Map<String, DataSet<?>> getBroadcastSets();
-	
+
 	/**
 	 * Gets the semantic properties that have been set for the user-defined functions (UDF).
-	 * 
+	 *
 	 * @return The semantic properties of the UDF.
 	 */
 	@Internal
 	SemanticProperties getSemanticProperties();
-	
+
 	// --------------------------------------------------------------------------------------------
 	// Fluent API methods
 	// --------------------------------------------------------------------------------------------
-	
+
 	/**
 	 * Sets the configuration parameters for the UDF. These are optional parameters that are passed
 	 * to the UDF in the {@link org.apache.flink.api.common.functions.AbstractRichFunction#open(Configuration)} method.
-	 * 
+	 *
 	 * @param parameters The configuration parameters for the UDF.
 	 * @return The operator itself, to allow chaining function calls.
 	 */
 	O withParameters(Configuration parameters);
-	
+
 	/**
 	 * Adds a certain data set as a broadcast set to this operator. Broadcasted data sets are available at all
 	 * parallel instances of this operator. A broadcast data set is registered under a certain name, and can be
 	 * retrieved under that name from the operators runtime context via
 	 * {@link org.apache.flink.api.common.functions.RuntimeContext#getBroadcastVariable(String)}.
-	 * 
-	 * The runtime context itself is available in all UDFs via
+	 *
+	 * <p>The runtime context itself is available in all UDFs via
 	 * {@link org.apache.flink.api.common.functions.AbstractRichFunction#getRuntimeContext()}.
-	 * 
+	 *
 	 * @param data The data set to be broadcasted.
 	 * @param name The name under which the broadcast data set retrieved.
 	 * @return The operator itself, to allow chaining function calls.
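
A minimal sketch combining the two fluent methods of this interface (the keys "prefix" and "limits" are made up for the example):

    import org.apache.flink.api.common.functions.RichMapFunction;
    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.configuration.Configuration;

    import java.util.List;

    public class UdfOperatorExample {
        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

            DataSet<Integer> limits = env.fromElements(10);

            Configuration params = new Configuration();
            params.setString("prefix", ">> ");

            env.fromElements("a", "b")
               .map(new RichMapFunction<String, String>() {
                   private String prefix;

                   @Override
                   public void open(Configuration parameters) {
                       // Delivered by withParameters(...) below.
                       prefix = parameters.getString("prefix", "");
                       // Delivered by withBroadcastSet(...) below; fetched
                       // here purely to demonstrate the lookup by name.
                       List<Integer> bc = getRuntimeContext().getBroadcastVariable("limits");
                   }

                   @Override
                   public String map(String value) {
                       return prefix + value;
                   }
               })
               .withParameters(params)
               .withBroadcastSet(limits, "limits")
               .print();
        }
    }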

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/UdfOperatorUtils.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/UdfOperatorUtils.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/UdfOperatorUtils.java
index 660f845..f8e04a9 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/UdfOperatorUtils.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/UdfOperatorUtils.java
@@ -28,6 +28,7 @@ import org.apache.flink.api.common.operators.SingleInputSemanticProperties;
 import org.apache.flink.api.java.functions.FunctionAnnotation;
 import org.apache.flink.api.java.sca.CodeAnalyzerException;
 import org.apache.flink.api.java.sca.UdfAnalyzer;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -45,7 +46,7 @@ public final class UdfOperatorUtils {
 		if (mode != CodeAnalysisMode.DISABLE
 				&& !udf.getClass().isAnnotationPresent(FunctionAnnotation.SkipCodeAnalysis.class)) {
 			final String operatorName = operator.getName() != null ? operator.getName()
-					: udfBaseClass.getSimpleName() + " at "+defaultName;
+					: udfBaseClass.getSimpleName() + " at " + defaultName;
 			try {
 				final UdfAnalyzer analyzer = new UdfAnalyzer(udfBaseClass, udf.getClass(), operatorName, operator.getInputType(), null,
 						operator.getResultType(), key, null, mode == CodeAnalysisMode.OPTIMIZE);
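
For context, a sketch of how the analysis is switched on and skipped, under the assumption that ExecutionConfig#setCodeAnalysisMode is the toggle in this Flink generation:

    import org.apache.flink.api.common.CodeAnalysisMode;
    import org.apache.flink.api.common.functions.MapFunction;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.functions.FunctionAnnotation;

    public class AnalyzerExample {

        // Opts this function out, matching the SkipCodeAnalysis check above.
        @FunctionAnnotation.SkipCodeAnalysis
        public static class OpaqueMapper implements MapFunction<String, String> {
            @Override
            public String map(String value) {
                return value.trim();
            }
        }

        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
            // Anything other than DISABLE lets the UdfAnalyzer run.
            env.getConfig().setCodeAnalysisMode(CodeAnalysisMode.HINT);

            env.fromElements(" a ", " b ").map(new OpaqueMapper()).print();
        }
    }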

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/UnionOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/UnionOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/UnionOperator.java
index 70ad374..0da5e01 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/UnionOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/UnionOperator.java
@@ -25,35 +25,35 @@ import org.apache.flink.api.common.operators.Union;
 import org.apache.flink.api.java.DataSet;
 
 /**
- * Java API operator for union of two data sets
- * 
- * @param <T> The type of the two input data sets and the result data set 
+ * Java API operator for union of two data sets.
+ *
+ * @param <T> The type of the two input data sets and the result data set
  */
 @Public
 public class UnionOperator<T> extends TwoInputOperator<T, T, T, UnionOperator<T>> {
 
 	private final String unionLocationName;
-	
+
 	/**
 	 * Create an operator that produces the union of the two given data sets.
-	 * 
+	 *
 	 * @param input1 The first data set to be unioned.
 	 * @param input2 The second data set to be unioned.
 	 */
 	public UnionOperator(DataSet<T> input1, DataSet<T> input2, String unionLocationName) {
 		super(input1, input2, input1.getType());
-		
+
 		if (!input1.getType().equals(input2.getType())) {
-			throw new InvalidProgramException("Cannot union inputs of different types. Input1=" 
+			throw new InvalidProgramException("Cannot union inputs of different types. Input1="
 					+ input1.getType() + ", input2=" + input2.getType());
 		}
-		
+
 		this.unionLocationName = unionLocationName;
 	}
-	
+
 	/**
 	 * Returns the BinaryNodeTranslation of the Union.
-	 * 
+	 *
 	 * @param input1 The first input of the union, as a common API operator.
 	 * @param input2 The second input of the union, as a common API operator.
 	 * @return The common API union operator.
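
A minimal usage sketch (illustrative data); the type check in the constructor above is what rejects inputs of differing types:

    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.tuple.Tuple2;

    public class UnionExample {
        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

            DataSet<Tuple2<String, Integer>> first = env.fromElements(Tuple2.of("a", 1));
            DataSet<Tuple2<String, Integer>> second = env.fromElements(Tuple2.of("b", 2));

            // Identical element types on both sides; a mismatch would raise
            // the InvalidProgramException thrown in the constructor.
            first.union(second).print();
        }
    }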


http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/JoinOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/JoinOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/JoinOperator.java
index 86ee2a2..4b5dc6b 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/JoinOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/JoinOperator.java
@@ -18,8 +18,6 @@
 
 package org.apache.flink.api.java.operators;
 
-import java.util.Arrays;
-
 import org.apache.flink.annotation.Internal;
 import org.apache.flink.annotation.Public;
 import org.apache.flink.annotation.PublicEvolving;
@@ -31,9 +29,12 @@ import org.apache.flink.api.common.functions.RichFlatJoinFunction;
 import org.apache.flink.api.common.operators.BinaryOperatorInformation;
 import org.apache.flink.api.common.operators.DualInputSemanticProperties;
 import org.apache.flink.api.common.operators.Keys;
+import org.apache.flink.api.common.operators.Keys.ExpressionKeys;
+import org.apache.flink.api.common.operators.Keys.IncompatibleKeysException;
+import org.apache.flink.api.common.operators.Keys.SelectorFunctionKeys;
 import org.apache.flink.api.common.operators.Operator;
-import org.apache.flink.api.common.operators.base.JoinOperatorBase;
 import org.apache.flink.api.common.operators.base.InnerJoinOperatorBase;
+import org.apache.flink.api.common.operators.base.JoinOperatorBase;
 import org.apache.flink.api.common.operators.base.JoinOperatorBase.JoinHint;
 import org.apache.flink.api.common.operators.base.OuterJoinOperatorBase;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
@@ -44,32 +45,53 @@ import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFieldsSec
 import org.apache.flink.api.java.functions.KeySelector;
 import org.apache.flink.api.java.functions.SemanticPropUtil;
 import org.apache.flink.api.java.operators.DeltaIteration.SolutionSetPlaceHolder;
-import org.apache.flink.api.common.operators.Keys.ExpressionKeys;
-import org.apache.flink.api.common.operators.Keys.IncompatibleKeysException;
-import org.apache.flink.api.common.operators.Keys.SelectorFunctionKeys;
+import org.apache.flink.api.java.operators.join.JoinFunctionAssigner;
 import org.apache.flink.api.java.operators.join.JoinOperatorSetsBase;
 import org.apache.flink.api.java.operators.join.JoinType;
-import org.apache.flink.api.java.operators.join.JoinFunctionAssigner;
-import org.apache.flink.api.java.operators.translation.TupleRightUnwrappingJoiner;
 import org.apache.flink.api.java.operators.translation.TupleLeftUnwrappingJoiner;
+import org.apache.flink.api.java.operators.translation.TupleRightUnwrappingJoiner;
 import org.apache.flink.api.java.operators.translation.TupleUnwrappingJoiner;
 import org.apache.flink.api.java.operators.translation.WrappingFunction;
+import org.apache.flink.api.java.tuple.Tuple;
+import org.apache.flink.api.java.tuple.Tuple1;
+import org.apache.flink.api.java.tuple.Tuple10;
+import org.apache.flink.api.java.tuple.Tuple11;
+import org.apache.flink.api.java.tuple.Tuple12;
+import org.apache.flink.api.java.tuple.Tuple13;
+import org.apache.flink.api.java.tuple.Tuple14;
+import org.apache.flink.api.java.tuple.Tuple15;
+import org.apache.flink.api.java.tuple.Tuple16;
+import org.apache.flink.api.java.tuple.Tuple17;
+import org.apache.flink.api.java.tuple.Tuple18;
+import org.apache.flink.api.java.tuple.Tuple19;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.api.java.tuple.Tuple20;
+import org.apache.flink.api.java.tuple.Tuple21;
+import org.apache.flink.api.java.tuple.Tuple22;
+import org.apache.flink.api.java.tuple.Tuple23;
+import org.apache.flink.api.java.tuple.Tuple24;
+import org.apache.flink.api.java.tuple.Tuple25;
+import org.apache.flink.api.java.tuple.Tuple3;
+import org.apache.flink.api.java.tuple.Tuple4;
+import org.apache.flink.api.java.tuple.Tuple5;
+import org.apache.flink.api.java.tuple.Tuple6;
+import org.apache.flink.api.java.tuple.Tuple7;
+import org.apache.flink.api.java.tuple.Tuple8;
+import org.apache.flink.api.java.tuple.Tuple9;
 import org.apache.flink.api.java.typeutils.TupleTypeInfo;
 import org.apache.flink.api.java.typeutils.TypeExtractor;
 import org.apache.flink.util.Collector;
 import org.apache.flink.util.Preconditions;
 
-//CHECKSTYLE.OFF: AvoidStarImport - Needed for TupleGenerator
-import org.apache.flink.api.java.tuple.*;
-//CHECKSTYLE.ON: AvoidStarImport
+import java.util.Arrays;
 
 /**
- * A {@link DataSet} that is the result of a Join transformation. 
- * 
+ * A {@link DataSet} that is the result of a Join transformation.
+ *
  * @param <I1> The type of the first input DataSet of the Join transformation.
  * @param <I2> The type of the second input DataSet of the Join transformation.
  * @param <OUT> The type of the result of the Join transformation.
- * 
+ *
  * @see DataSet
  */
 @Public
@@ -77,22 +99,20 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 
 	protected final Keys<I1> keys1;
 	protected final Keys<I2> keys2;
-	
+
 	private final JoinHint joinHint;
 	protected final JoinType joinType;
 
 	private Partitioner<?> customPartitioner;
-	
-	
+
 	protected JoinOperator(DataSet<I1> input1, DataSet<I2> input2,
 			Keys<I1> keys1, Keys<I2> keys2,
-			TypeInformation<OUT> returnType, JoinHint hint, JoinType type)
-	{
+			TypeInformation<OUT> returnType, JoinHint hint, JoinType type) {
 		super(input1, input2, returnType);
-		
+
 		Preconditions.checkNotNull(keys1);
 		Preconditions.checkNotNull(keys2);
-		
+
 		try {
 			if (!keys1.areCompatible(keys2)) {
 				throw new InvalidProgramException("The types of the key fields do not match.");
@@ -125,18 +145,18 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		this.joinHint = hint == null ? InnerJoinOperatorBase.JoinHint.OPTIMIZER_CHOOSES : hint;
 		this.joinType = type;
 	}
-	
+
 	protected Keys<I1> getKeys1() {
 		return this.keys1;
 	}
-	
+
 	protected Keys<I2> getKeys2() {
 		return this.keys2;
 	}
-	
+
 	/**
 	 * Gets the JoinHint that describes how the join is executed.
-	 * 
+	 *
 	 * @return The JoinHint.
 	 */
 	@Internal
@@ -153,14 +173,14 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 	public JoinType getJoinType() {
 		return this.joinType;
 	}
-	
+
 	/**
 	 * Sets a custom partitioner for this join. The partitioner will be called on the join keys to determine
 	 * the partition a key should be assigned to. The partitioner is evaluated on both join inputs in the
 	 * same way.
-	 * <p>
-	 * NOTE: A custom partitioner can only be used with single-field join keys, not with composite join keys.
-	 * 
+	 *
+	 * <p>NOTE: A custom partitioner can only be used with single-field join keys, not with composite join keys.
+	 *
 	 * @param partitioner The custom partitioner to be used.
 	 * @return This join operator, to allow for function chaining.
 	 */
@@ -172,42 +192,43 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		this.customPartitioner = getInput1().clean(partitioner);
 		return this;
 	}
-	
+
 	/**
 	 * Gets the custom partitioner used by this join, or {@code null}, if none is set.
-	 * 
+	 *
 	 * @return The custom partitioner used by this join.
 	 */
 	@Internal
 	public Partitioner<?> getPartitioner() {
 		return customPartitioner;
 	}
-	
+
 	// --------------------------------------------------------------------------------------------
 	// special join types
 	// --------------------------------------------------------------------------------------------
-	
+
 	/**
-	 * A Join transformation that applies a {@link JoinFunction} on each pair of joining elements.<br>
-	 * It also represents the {@link DataSet} that is the result of a Join transformation. 
-	 * 
+	 * A Join transformation that applies a {@link JoinFunction} on each pair of joining elements.
+	 *
+	 * <p>It also represents the {@link DataSet} that is the result of a Join transformation.
+	 *
 	 * @param <I1> The type of the first input DataSet of the Join transformation.
 	 * @param <I2> The type of the second input DataSet of the Join transformation.
 	 * @param <OUT> The type of the result of the Join transformation.
-	 * 
+	 *
 	 * @see org.apache.flink.api.common.functions.RichFlatJoinFunction
 	 * @see DataSet
 	 */
 	@Public
 	public static class EquiJoin<I1, I2, OUT> extends JoinOperator<I1, I2, OUT> {
-		
+
 		private final FlatJoinFunction<I1, I2, OUT> function;
-		
+
 		@SuppressWarnings("unused")
 		private boolean preserve1;
 		@SuppressWarnings("unused")
 		private boolean preserve2;
-		
+
 		private final String joinLocationName;
 
 		public EquiJoin(DataSet<I1> input1, DataSet<I2> input2,
@@ -226,11 +247,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 				Keys<I1> keys1, Keys<I2> keys2, FlatJoinFunction<I1, I2, OUT> function,
 				TypeInformation<OUT> returnType, JoinHint hint, String joinLocationName, JoinType type) {
 			super(input1, input2, keys1, keys2, returnType, hint, type);
-			
+
 			if (function == null) {
 				throw new NullPointerException();
 			}
-			
+
 			this.function = function;
 			this.joinLocationName = joinLocationName;
 
@@ -241,7 +262,7 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 				Keys<I1> keys1, Keys<I2> keys2, FlatJoinFunction<I1, I2, OUT> generatedFunction, JoinFunction<I1, I2, OUT> function,
 				TypeInformation<OUT> returnType, JoinHint hint, String joinLocationName, JoinType type) {
 			super(input1, input2, keys1, keys2, returnType, hint, type);
-			
+
 			this.joinLocationName = joinLocationName;
 
 			if (function == null) {
@@ -252,7 +273,7 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 
 			UdfOperatorUtils.analyzeDualInputUdf(this, JoinFunction.class, joinLocationName, function, keys1, keys2);
 		}
-		
+
 		@Override
 		protected FlatJoinFunction<I1, I2, OUT> getFunction() {
 			return function;
@@ -265,16 +286,16 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 			DualInputSemanticProperties props = super.getSemanticProperties();
 
 			// offset semantic information by extracted key fields
-			if(props != null &&
+			if (props != null &&
 					(this.keys1 instanceof SelectorFunctionKeys ||
 							this.keys2 instanceof SelectorFunctionKeys)) {
 
 				int numFields1 = this.getInput1Type().getTotalFields();
 				int numFields2 = this.getInput2Type().getTotalFields();
 				int offset1 = (this.keys1 instanceof SelectorFunctionKeys) ?
-						((SelectorFunctionKeys<?,?>) this.keys1).getKeyType().getTotalFields() : 0;
+						((SelectorFunctionKeys<?, ?>) this.keys1).getKeyType().getTotalFields() : 0;
 				int offset2 = (this.keys2 instanceof SelectorFunctionKeys) ?
-						((SelectorFunctionKeys<?,?>) this.keys2).getKeyType().getTotalFields() : 0;
+						((SelectorFunctionKeys<?, ?>) this.keys2).getKeyType().getTotalFields() : 0;
 
 				props = SemanticPropUtil.addSourceFieldOffsets(props, numFields1, numFields2, offset1, offset2);
 			}
@@ -371,7 +392,6 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 			return builder.build();
 		}
 
-
 		private static final class JoinOperatorBaseBuilder<OUT> {
 
 			private final String name;
@@ -402,7 +422,7 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 					SelectorFunctionKeys<I1, ?> rawKeys1) {
 
 				@SuppressWarnings("unchecked")
-				SelectorFunctionKeys<I1, K> keys1 = (SelectorFunctionKeys<I1, K>)rawKeys1;
+				SelectorFunctionKeys<I1, K> keys1 = (SelectorFunctionKeys<I1, K>) rawKeys1;
 				TypeInformation<Tuple2<K, I1>> typeInfoWithKey1 = KeyFunctions.createTypeWithKey(keys1);
 				Operator<Tuple2<K, I1>> keyMapper1 = KeyFunctions.appendKeyExtractor(input1, keys1);
 
@@ -414,7 +434,7 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 					SelectorFunctionKeys<I2, ?> rawKeys2) {
 
 				@SuppressWarnings("unchecked")
-				SelectorFunctionKeys<I2, K> keys2 = (SelectorFunctionKeys<I2, K>)rawKeys2;
+				SelectorFunctionKeys<I2, K> keys2 = (SelectorFunctionKeys<I2, K>) rawKeys2;
 				TypeInformation<Tuple2<K, I2>> typeInfoWithKey2 = KeyFunctions.createTypeWithKey(keys2);
 				Operator<Tuple2<K, I2>> keyMapper2 = KeyFunctions.appendKeyExtractor(input2, keys2);
 
@@ -508,11 +528,12 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 			}
 		}
 	}
-	
+
 	/**
-	 * A Join transformation that wraps pairs of joining elements into {@link Tuple2}.<br>
-	 * It also represents the {@link DataSet} that is the result of a Join transformation. 
-	 * 
+	 * A Join transformation that wraps pairs of joining elements into {@link Tuple2}.
+	 *
+	 * <p>It also represents the {@link DataSet} that is the result of a Join transformation.
+	 *
 	 * @param <I1> The type of the first input DataSet of the Join transformation.
 	 * @param <I2> The type of the second input DataSet of the Join transformation.
 	 *
@@ -523,20 +544,20 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 	public static final class DefaultJoin<I1, I2> extends EquiJoin<I1, I2, Tuple2<I1, I2>> implements JoinFunctionAssigner<I1, I2> {
 
 		public DefaultJoin(DataSet<I1> input1, DataSet<I2> input2,
-				Keys<I1> keys1, Keys<I2> keys2, JoinHint hint, String joinLocationName, JoinType type)
-		{
+				Keys<I1> keys1, Keys<I2> keys2, JoinHint hint, String joinLocationName, JoinType type) {
 			super(input1, input2, keys1, keys2,
 				new DefaultFlatJoinFunction<I1, I2>(),
 				new TupleTypeInfo<Tuple2<I1, I2>>(input1.getType(), input2.getType()), hint, joinLocationName, type);
 		}
 
 		/**
-		 * Finalizes a Join transformation by applying a {@link org.apache.flink.api.common.functions.RichFlatJoinFunction} to each pair of joined elements.<br>
-		 * Each JoinFunction call returns exactly one element. 
-		 * 
+		 * Finalizes a Join transformation by applying a {@link org.apache.flink.api.common.functions.RichFlatJoinFunction} to each pair of joined elements.
+		 *
+		 * <p>Each JoinFunction call returns exactly one element.
+		 *
 		 * @param function The JoinFunction that is called for each pair of joined elements.
 		 * @return An EquiJoin that represents the joined result DataSet.
-		 * 
+		 *
 		 * @see org.apache.flink.api.common.functions.RichFlatJoinFunction
 		 * @see org.apache.flink.api.java.operators.JoinOperator.EquiJoin
 		 * @see DataSet
@@ -558,8 +579,15 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 			return new EquiJoin<>(getInput1(), getInput2(), getKeys1(), getKeys2(), generatedFunction, function, returnType, getJoinHint(), Utils.getCallLocationName(), joinType);
 		}
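A sketch of finalizing a DefaultJoin with a JoinFunction as documented above (same illustrative inputs as in the earlier sketch):

	DataSet<String> joined = left.join(right)
		.where(0)
		.equalTo(0)
		// exactly one output element per pair of joined elements
		.with(new JoinFunction<Tuple2<Long, String>, Tuple2<Long, Integer>, String>() {
			@Override
			public String join(Tuple2<Long, String> first, Tuple2<Long, Integer> second) {
				return first.f1 + ":" + second.f1;
			}
		});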
 
+		/**
+		 * Wrapper around {@link JoinFunction}.
+		 *
+		 * @param <IN1> type of elements of first collection
+		 * @param <IN2> type of elements of second collection
+		 * @param <OUT> type of resulting elements
+		 */
 		@Internal
-		public static class WrappingFlatJoinFunction<IN1, IN2, OUT> extends WrappingFunction<JoinFunction<IN1,IN2,OUT>> implements FlatJoinFunction<IN1, IN2, OUT> {
+		public static class WrappingFlatJoinFunction<IN1, IN2, OUT> extends WrappingFunction<JoinFunction<IN1, IN2, OUT>> implements FlatJoinFunction<IN1, IN2, OUT> {
 
 			private static final long serialVersionUID = 1L;
 
@@ -574,11 +602,12 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Applies a ProjectJoin transformation and projects the first join input<br>
-		 * If the first join input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
-		 * If the first join input is not a Tuple DataSet, no parameters should be passed.<br>
-		 * 
-		 * Fields of the first and second input can be added by chaining the method calls of
+		 * Applies a ProjectJoin transformation and projects the first join input.
+		 *
+		 * <p>If the first join input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
+		 * If the first join input is not a Tuple DataSet, no parameters should be passed.
+		 *
+		 * <p>Fields of the first and second input can be added by chaining the method calls of
 		 * {@link org.apache.flink.api.java.operators.JoinOperator.ProjectJoin#projectFirst(int...)} and
 		 * {@link org.apache.flink.api.java.operators.JoinOperator.ProjectJoin#projectSecond(int...)}.
 		 *
@@ -588,7 +617,7 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		 * 					   For a non-Tuple DataSet, do not provide parameters.
 		 * 					   The order of fields in the output tuple is defined by the order of field indexes.
 		 * @return A ProjectJoin which represents the projected join result.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 * @see org.apache.flink.api.java.operators.JoinOperator.ProjectJoin
@@ -598,67 +627,69 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 
 			return joinProjection.projectTupleX();
 		}
-		
+
 		/**
-		 * Applies a ProjectJoin transformation and projects the second join input<br>
-		 * If the second join input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
-		 * If the second join input is not a Tuple DataSet, no parameters should be passed.<br>
-		 * 
-		 * Fields of the first and second input can be added by chaining the method calls of
+		 * Applies a ProjectJoin transformation and projects the second join input.
+		 *
+		 * <p>If the second join input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
+		 * If the second join input is not a Tuple DataSet, no parameters should be passed.
+		 *
+		 * <p>Fields of the first and second input can be added by chaining the method calls of
 		 * {@link org.apache.flink.api.java.operators.JoinOperator.ProjectJoin#projectFirst(int...)} and
 		 * {@link org.apache.flink.api.java.operators.JoinOperator.ProjectJoin#projectSecond(int...)}.
 		 *
-		 * <b>Note: With the current implementation, the Project transformation loses type information.</b>
+		 * <p><b>Note: With the current implementation, the Project transformation loses type information.</b>
 		 *
-		 * @param secondFieldIndexes If the second input is a Tuple DataSet, the indexes of the selected fields. 
+		 * @param secondFieldIndexes If the second input is a Tuple DataSet, the indexes of the selected fields.
 		 * 					   For a non-Tuple DataSet, do not provide parameters.
 		 * 					   The order of fields in the output tuple is defined by the order of field indexes.
 		 * @return A ProjectJoin which represents the projected join result.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 * @see org.apache.flink.api.java.operators.JoinOperator.ProjectJoin
 		 */
 		public <OUT extends Tuple> ProjectJoin<I1, I2, OUT> projectSecond(int... secondFieldIndexes) {
 			JoinProjection<I1, I2> joinProjection = new JoinProjection<>(getInput1(), getInput2(), getKeys1(), getKeys2(), getJoinHint(), null, secondFieldIndexes);
-			
+
 			return joinProjection.projectTupleX();
 		}
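A sketch of chaining projectFirst and projectSecond (left and right as in the earlier sketch); the concrete tuple type comes from the assignment target, which is exactly the type-information loss the note above warns about:

	DataSet<Tuple3<String, Long, Integer>> projected = left.join(right)
		.where(0)
		.equalTo(0)
		.projectFirst(1, 0)   // fields 1 and 0 of the first input, in that order
		.projectSecond(1);    // then field 1 of the second input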
 
 //		public JoinOperator<I1, I2, I1> leftSemiJoin() {
 //			return new LeftSemiJoin<I1, I2>(getInput1(), getInput2(), getKeys1(), getKeys2(), getJoinHint());
 //		}
-		
+
 //		public JoinOperator<I1, I2, I2> rightSemiJoin() {
 //			return new RightSemiJoin<I1, I2>(getInput1(), getInput2(), getKeys1(), getKeys2(), getJoinHint());
 //		}
-		
+
 //		public JoinOperator<I1, I2, I1> leftAntiJoin() {
 //			return new LeftAntiJoin<I1, I2>(getInput1(), getInput2(), getKeys1(), getKeys2(), getJoinHint());
 //		}
-		
+
 //		public JoinOperator<I1, I2, I2> rightAntiJoin() {
 //			return new RightAntiJoin<I1, I2>(getInput1(), getInput2(), getKeys1(), getKeys2(), getJoinHint());
 //		}
 	}
-	
+
 	/**
-	 * A Join transformation that projects joining elements or fields of joining {@link Tuple Tuples} 
-	 * into result {@link Tuple Tuples}. <br>
-	 * It also represents the {@link DataSet} that is the result of a Join transformation. 
-	 * 
+	 * A Join transformation that projects joining elements or fields of joining {@link Tuple Tuples}
+	 * into result {@link Tuple Tuples}.
+	 *
+	 * <p>It also represents the {@link DataSet} that is the result of a Join transformation.
+	 *
 	 * @param <I1> The type of the first input DataSet of the Join transformation.
 	 * @param <I2> The type of the second input DataSet of the Join transformation.
 	 * @param <OUT> The type of the result of the Join transformation.
-	 * 
+	 *
 	 * @see Tuple
 	 * @see DataSet
 	 */
 	@Public
 	public static class ProjectJoin<I1, I2, OUT extends Tuple> extends EquiJoin<I1, I2, OUT> {
-		
+
 		private JoinProjection<I1, I2> joinProj;
-		
+
 		protected ProjectJoin(DataSet<I1> input1, DataSet<I2> input2, Keys<I1> keys1, Keys<I2> keys2, JoinHint hint, int[] fields, boolean[] isFromFirst, TupleTypeInfo<OUT> returnType) {
 			super(input1, input2, keys1, keys2,
 					new ProjectFlatJoinFunction<I1, I2, OUT>(fields, isFromFirst, returnType.createSerializer(input1.getExecutionEnvironment().getConfig()).createInstance()),
@@ -666,7 +697,7 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 
 			joinProj = null;
 		}
-		
+
 		protected ProjectJoin(DataSet<I1> input1, DataSet<I2> input2, Keys<I1> keys1, Keys<I2> keys2, JoinHint hint, int[] fields, boolean[] isFromFirst, TupleTypeInfo<OUT> returnType, JoinProjection<I1, I2> joinProj) {
 			super(input1, input2, keys1, keys2,
 					new ProjectFlatJoinFunction<I1, I2, OUT>(fields, isFromFirst, returnType.createSerializer(input1.getExecutionEnvironment().getConfig()).createInstance()),
@@ -681,15 +712,16 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Continues a ProjectJoin transformation and adds fields of the first join input to the projection.<br>
-		 * If the first join input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
-		 * If the first join input is not a Tuple DataSet, no parameters should be passed.<br>
+		 * Continues a ProjectJoin transformation and adds fields of the first join input to the projection.
+		 *
+		 * <p>If the first join input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
+		 * If the first join input is not a Tuple DataSet, no parameters should be passed.
 		 *
-		 * Additional fields of the first and second input can be added by chaining the method calls of
+		 * <p>Additional fields of the first and second input can be added by chaining the method calls of
 		 * {@link org.apache.flink.api.java.operators.JoinOperator.ProjectJoin#projectFirst(int...)} and
 		 * {@link org.apache.flink.api.java.operators.JoinOperator.ProjectJoin#projectSecond(int...)}.
 		 *
-		 * <b>Note: With the current implementation, the Project transformation loses type information.</b>
+		 * <p><b>Note: With the current implementation, the Project transformation loses type information.</b>
 		 *
 		 * @param firstFieldIndexes If the first input is a Tuple DataSet, the indexes of the selected fields.
 		 * 					   For a non-Tuple DataSet, do not provide parameters.
@@ -701,22 +733,23 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		 * @see org.apache.flink.api.java.operators.JoinOperator.ProjectJoin
 		 */
 		@SuppressWarnings("hiding")
-		public <OUT extends Tuple> ProjectJoin<I1, I2, OUT> projectFirst(int... firstFieldIndexes) {	
+		public <OUT extends Tuple> ProjectJoin<I1, I2, OUT> projectFirst(int... firstFieldIndexes) {
 			joinProj = joinProj.projectFirst(firstFieldIndexes);
-			
+
 			return joinProj.projectTupleX();
 		}
 
 		/**
-		 * Continues a ProjectJoin transformation and adds fields of the second join input to the projection.<br>
-		 * If the second join input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
-		 * If the second join input is not a Tuple DataSet, no parameters should be passed.<br>
+		 * Continues a ProjectJoin transformation and adds fields of the second join input to the projection.
 		 *
-		 * Additional fields of the first and second input can be added by chaining the method calls of
+		 * <p>If the second join input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
+		 * If the second join input is not a Tuple DataSet, no parameters should be passed.
+		 *
+		 * <p>Additional fields of the first and second input can be added by chaining the method calls of
 		 * {@link org.apache.flink.api.java.operators.JoinOperator.ProjectJoin#projectFirst(int...)} and
 		 * {@link org.apache.flink.api.java.operators.JoinOperator.ProjectJoin#projectSecond(int...)}.
 		 *
-		 * <b>Note: With the current implementation, the Project transformation loses type information.</b>
+		 * <p><b>Note: With the current implementation, the Project transformation loses type information.</b>
 		 *
 		 * @param secondFieldIndexes If the second input is a Tuple DataSet, the indexes of the selected fields.
 		 * 					   For a non-Tuple DataSet, do not provide parameters.
@@ -730,7 +763,7 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		@SuppressWarnings("hiding")
 		public <OUT extends Tuple> ProjectJoin<I1, I2, OUT> projectSecond(int... secondFieldIndexes) {
 			joinProj = joinProj.projectSecond(secondFieldIndexes);
-			
+
 			return joinProj.projectTupleX();
 		}
 
@@ -743,15 +776,15 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		@Deprecated
 		@PublicEvolving
 		public <OUT extends Tuple> JoinOperator<I1, I2, OUT> types(Class<?>... types) {
-			TupleTypeInfo<OUT> typeInfo = (TupleTypeInfo<OUT>)this.getResultType();
+			TupleTypeInfo<OUT> typeInfo = (TupleTypeInfo<OUT>) this.getResultType();
 
-			if(types.length != typeInfo.getArity()) {
+			if (types.length != typeInfo.getArity()) {
 				throw new InvalidProgramException("Provided types do not match projection.");
 			}
-			for (int i=0; i<types.length; i++) {
+			for (int i = 0; i < types.length; i++) {
 				Class<?> typeClass = types[i];
 				if (!typeClass.equals(typeInfo.getTypeAt(i).getTypeClass())) {
-					throw new InvalidProgramException("Provided type "+typeClass.getSimpleName()+" at position "+i+" does not match projection");
+					throw new InvalidProgramException("Provided type " + typeClass.getSimpleName() + " at position " + i + " does not match projection");
 				}
 			}
 			return (JoinOperator<I1, I2, OUT>) this;
@@ -766,7 +799,7 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		public JoinOperator<I1, I2, OUT> withForwardedFieldsSecond(String... forwardedFieldsSecond) {
 			throw new InvalidProgramException("The semantic properties (forwarded fields) are automatically calculated.");
 		}
-		
+
 		@Override
 		protected DualInputSemanticProperties extractSemanticAnnotationsFromUdf(Class<?> udfClass) {
 			// we do not extract the annotation, we construct the properties from the projection#
@@ -775,68 +808,69 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 	}
-	
+
 //	@SuppressWarnings("unused")
 //	private static final class LeftAntiJoin<I1, I2> extends JoinOperator<I1, I2, I1> {
-//		
+//
 //		protected LeftAntiJoin(DataSet<I1> input1, DataSet<I2> input2, Keys<I1> keys1, Keys<I2> keys2, JoinHint hint) {
 //			super(input1, input2, keys1, keys2, input1.getType(), hint);
 //		}
-//		
+//
 //		@Override
 //		protected Operator<I1> translateToDataFlow(Operator<I1> input1, Operator<I2> input2) {
 //			throw new UnsupportedOperationException("LeftAntiJoin operator currently not supported.");
 //		}
 //	}
-	
+
 //	@SuppressWarnings("unused")
 //	private static final class RightAntiJoin<I1, I2> extends JoinOperator<I1, I2, I2> {
-//		
+//
 //		protected RightAntiJoin(DataSet<I1> input1, DataSet<I2> input2, Keys<I1> keys1, Keys<I2> keys2, JoinHint hint) {
 //			super(input1, input2, keys1, keys2, input2.getType(), hint);
 //		}
-//		
+//
 //		@Override
 //		protected Operator<I2> translateToDataFlow(Operator<I1> input1, Operator<I2> input2) {
 //			throw new UnsupportedOperationException("RightAntiJoin operator currently not supported.");
 //		}
 //	}
-	
+
 //	@SuppressWarnings("unused")
 //	private static final class LeftSemiJoin<I1, I2> extends EquiJoin<I1, I2, I1> {
-//		
+//
 //		protected LeftSemiJoin(DataSet<I1> input1, DataSet<I2> input2, Keys<I1> keys1, Keys<I2> keys2, JoinHint hint) {
 //			super(input1, input2, keys1, keys2, new LeftSemiJoinFunction<I1, I2>(), input1.getType(), hint);
 //		}
-//		
+//
 //		@Override
 //		protected Operator<I1> translateToDataFlow(Operator<I1> input1, Operator<I2> input2) {
 //			// TODO: Runtime support required. Each left tuple may be returned only once.
-//			// 	     Special exec strategy (runtime + optimizer) based on hash join required. 
+//			// 	     Special exec strategy (runtime + optimizer) based on hash join required.
 //			// 		 Either no duplicates of right side in HT or left tuples removed from HT after first match.
 //			throw new UnsupportedOperationException("LeftSemiJoin operator currently not supported.");
 //		}
 //	}
-	
+
 //	@SuppressWarnings("unused")
 //	private static final class RightSemiJoin<I1, I2> extends EquiJoin<I1, I2, I2> {
-//		
+//
 //		protected RightSemiJoin(DataSet<I1> input1, DataSet<I2> input2, Keys<I1> keys1, Keys<I2> keys2, JoinHint hint) {
 //			super(input1, input2, keys1, keys2, new RightSemiJoinFunction<I1, I2>(), input2.getType(), hint);
 //		}
-//		
+//
 //		@Override
 //		protected Operator<I2> translateToDataFlow(Operator<I1> input1, Operator<I2> input2) {
 //			// TODO: Runtime support required. Each right tuple may be returned only once.
-//			// 	     Special exec strategy (runtime + optimizer) based on hash join required. 
+//			// 	     Special exec strategy (runtime + optimizer) based on hash join required.
 //			// 		 Either no duplicates of left side in HT or right tuples removed from HT after first match.
 //			throw new UnsupportedOperationException("RightSemiJoin operator currently not supported.");
 //		}
 //	}
 
 	/**
-	 * Intermediate step of a Join transformation. <br>
-	 * To continue the Join transformation, select the join key of the first input {@link DataSet} by calling
+	 * Intermediate step of a Join transformation.
+	 *
+	 * <p>To continue the Join transformation, select the join key of the first input {@link DataSet} by calling
 	 * {@link JoinOperatorSets#where(int...)} or
 	 * {@link JoinOperatorSets#where(org.apache.flink.api.java.functions.KeySelector)}.
 	 *
@@ -894,10 +928,10 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 			return new JoinOperatorSetsPredicate(new SelectorFunctionKeys<>(input1.clean(keySelector), input1.getType(), keyType));
 		}
 
-
 		/**
-		 * Intermediate step of a Join transformation. <br>
-		 * To continue the Join transformation, select the join key of the second input {@link DataSet} by calling
+		 * Intermediate step of a Join transformation.
+		 *
+		 * <p>To continue the Join transformation, select the join key of the second input {@link DataSet} by calling
 		 * {@link org.apache.flink.api.java.operators.JoinOperator.JoinOperatorSets.JoinOperatorSetsPredicate#equalTo(int...)} or
 		 * {@link org.apache.flink.api.java.operators.JoinOperator.JoinOperatorSets.JoinOperatorSetsPredicate#equalTo(KeySelector)}.
 		 */
@@ -910,10 +944,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 
 			/**
 			 * Continues a Join transformation and defines the {@link Tuple} fields of the second join
-			 * {@link DataSet} that should be used as join keys.<br>
-			 * <b>Note: Fields can only be selected as join keys on Tuple DataSets.</b><br>
-			 * <p>
-			 * The resulting {@link DefaultJoin} wraps each pair of joining elements into a {@link Tuple2}, with
+			 * {@link DataSet} that should be used as join keys.
+			 *
+			 * <p><b>Note: Fields can only be selected as join keys on Tuple DataSets.</b>
+			 *
+			 * <p>The resulting {@link DefaultJoin} wraps each pair of joining elements into a {@link Tuple2}, with
 			 * the element of the first input being the first field of the tuple and the element of the
 			 * second input being the second field of the tuple.
 			 *
@@ -927,9 +962,9 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 
 			/**
 			 * Continues a Join transformation and defines the fields of the second join
-			 * {@link DataSet} that should be used as join keys.<br>
-			 * <p>
-			 * The resulting {@link DefaultJoin} wraps each pair of joining elements into a {@link Tuple2}, with
+			 * {@link DataSet} that should be used as join keys.
+			 *
+			 * <p>The resulting {@link DefaultJoin} wraps each pair of joining elements into a {@link Tuple2}, with
 			 * the element of the first input being the first field of the tuple and the element of the
 			 * second input being the second field of the tuple.
 			 *
@@ -942,11 +977,12 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 			}
 
 			/**
-			 * Continues a Join transformation and defines a {@link KeySelector} function for the second join {@link DataSet}.<br>
-			 * The KeySelector function is called for each element of the second DataSet and extracts a single
-			 * key value on which the DataSet is joined. <br>
-			 * <p>
-			 * The resulting {@link DefaultJoin} wraps each pair of joining elements into a {@link Tuple2}, with
+			 * Continues a Join transformation and defines a {@link KeySelector} function for the second join {@link DataSet}.
+			 *
+			 * <p>The KeySelector function is called for each element of the second DataSet and extracts a single
+			 * key value on which the DataSet is joined.
+			 *
+			 * <p>The resulting {@link DefaultJoin} wraps each pair of joining elements into a {@link Tuple2}, with
 			 * the element of the first input being the first field of the tuple and the element of the
 			 * second input being the second field of the tuple.
 			 *
@@ -961,7 +997,6 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 	}
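A sketch of the where/equalTo steps with KeySelector functions (left and right as in the earlier sketch); the resulting DefaultJoin wraps each joining pair into a Tuple2:

	DataSet<Tuple2<Tuple2<Long, String>, Tuple2<Long, Integer>>> pairs =
		left.join(right)
			.where(new KeySelector<Tuple2<Long, String>, Long>() {
				@Override
				public Long getKey(Tuple2<Long, String> value) {
					return value.f0;
				}
			})
			.equalTo(new KeySelector<Tuple2<Long, Integer>, Long>() {
				@Override
				public Long getKey(Tuple2<Long, Integer> value) {
					return value.f0;
				}
			});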
 
-	
 	// --------------------------------------------------------------------------------------------
 	//  default join functions
 	// --------------------------------------------------------------------------------------------
@@ -969,13 +1004,13 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 	@ForwardedFieldsFirst("*->0")
 	@ForwardedFieldsSecond("*->1")
 	@Internal
-	public static final class DefaultFlatJoinFunction<T1, T2> extends RichFlatJoinFunction<T1, T2, Tuple2<T1, T2>> {
+	private static final class DefaultFlatJoinFunction<T1, T2> extends RichFlatJoinFunction<T1, T2, Tuple2<T1, T2>> {
 
 		private static final long serialVersionUID = 1L;
 		private final Tuple2<T1, T2> outTuple = new Tuple2<>();
 
 		@Override
-		public void join(T1 first, T2 second, Collector<Tuple2<T1,T2>> out) throws Exception {
+		public void join(T1 first, T2 second, Collector<Tuple2<T1, T2>> out) throws Exception {
 			outTuple.f0 = first;
 			outTuple.f1 = second;
 			out.collect(outTuple);
@@ -983,26 +1018,26 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 	}
 
 	@Internal
-	public static final class ProjectFlatJoinFunction<T1, T2, R extends Tuple> extends RichFlatJoinFunction<T1, T2, R> {
-		
+	private static final class ProjectFlatJoinFunction<T1, T2, R extends Tuple> extends RichFlatJoinFunction<T1, T2, R> {
+
 		private static final long serialVersionUID = 1L;
-		
+
 		private final int[] fields;
 		private final boolean[] isFromFirst;
 		private final R outTuple;
-	
+
 		/**
 		 * Instantiates and configures a ProjectFlatJoinFunction.
 		 * Creates output tuples by copying fields of joined input tuples (or a full input object) into an output tuple.
-		 * 
-		 * @param fields List of indexes fields that should be copied to the output tuple. 
-		 * 					If the full input object should be copied (for example in case of a non-tuple input) the index should be -1. 
+		 *
+		 * @param fields List of field indexes that should be copied to the output tuple.
+		 * 					If the full input object should be copied (for example in case of a non-tuple input) the index should be -1.
 		 * @param isFromFirst List of flags indicating whether the field should be copied from the first (true) or the second (false) input.
 		 * @param outTupleInstance An instance of an output tuple.
 		 */
 		private ProjectFlatJoinFunction(int[] fields, boolean[] isFromFirst, R outTupleInstance) {
-			if(fields.length != isFromFirst.length) {
-				throw new IllegalArgumentException("Fields and isFromFirst arrays must have same length!"); 
+			if (fields.length != isFromFirst.length) {
+				throw new IllegalArgumentException("Fields and isFromFirst arrays must have same length!");
 			}
 
 			this.fields = fields;
@@ -1036,12 +1071,12 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 			}
 			out.collect(outTuple);
 		}
-		
+
 	}
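A sketch of the index -1 case from the constructor Javadoc above: a non-Tuple input is copied wholesale by calling projectSecond() without arguments (illustrative data; "*" selects the full String element as the join key; `env` assumed as before):

	DataSet<Tuple2<String, Integer>> scores = env.fromElements(Tuple2.of("a", 1));
	DataSet<String> names = env.fromElements("a", "b");

	DataSet<Tuple2<Integer, String>> out = scores.join(names)
		.where(0)
		.equalTo("*")
		.projectFirst(1)      // field 1 of the first input
		.projectSecond();     // the full String element, recorded internally as index -1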
 
 	@Internal
-	public static final class JoinProjection<I1, I2> {
-		
+	private static final class JoinProjection<I1, I2> {
+
 		private final DataSet<I1> ds1;
 		private final DataSet<I2> ds2;
 		private final Keys<I1> keys1;
@@ -1050,10 +1085,10 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 
 		private int[] fieldIndexes;
 		private boolean[] isFieldInFirst;
-		
+
 		private final int numFieldsDs1;
 		private final int numFieldsDs2;
-		
+
 		public JoinProjection(DataSet<I1> ds1, DataSet<I2> ds2, Keys<I1> keys1, Keys<I2> keys2, JoinHint hint, int[] firstFieldIndexes, int[] secondFieldIndexes) {
 			this.ds1 = ds1;
 			this.ds2 = ds2;
@@ -1063,32 +1098,32 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 
 			boolean isFirstTuple;
 			boolean isSecondTuple;
-			
-			if(ds1.getType() instanceof TupleTypeInfo) {
+
+			if (ds1.getType() instanceof TupleTypeInfo) {
 				numFieldsDs1 = ds1.getType().getArity();
 				isFirstTuple = true;
 			} else {
 				numFieldsDs1 = 1;
 				isFirstTuple = false;
 			}
-			if(ds2.getType() instanceof TupleTypeInfo) {
+			if (ds2.getType() instanceof TupleTypeInfo) {
 				numFieldsDs2 = ds2.getType().getArity();
 				isSecondTuple = true;
 			} else {
 				numFieldsDs2 = 1;
 				isSecondTuple = false;
 			}
-			
+
 			boolean isTuple;
 			boolean firstInput;
-			
-			if(firstFieldIndexes != null && secondFieldIndexes == null) {
+
+			if (firstFieldIndexes != null && secondFieldIndexes == null) {
 				// index array for first input is provided
 				firstInput = true;
 				isTuple = isFirstTuple;
 				this.fieldIndexes = firstFieldIndexes;
-				
-				if(this.fieldIndexes.length == 0) {
+
+				if (this.fieldIndexes.length == 0) {
 					// no indexes provided, treat tuple as regular object
 					isTuple = false;
 				}
@@ -1097,8 +1132,8 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 				firstInput = false;
 				isTuple = isSecondTuple;
 				this.fieldIndexes = secondFieldIndexes;
-				
-				if(this.fieldIndexes.length == 0) {
+
+				if (this.fieldIndexes.length == 0) {
 					// no indexes provided, treat tuple as regular object
 					isTuple = false;
 				}
@@ -1107,23 +1142,23 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 			} else {
 				throw new IllegalArgumentException("You must provide at most one field index array.");
 			}
-			
-			if(!isTuple && this.fieldIndexes.length != 0) {
+
+			if (!isTuple && this.fieldIndexes.length != 0) {
 				// field index provided for non-Tuple input
 				throw new IllegalArgumentException("Input is not a Tuple. Call projectFirst() (or projectSecond()) without arguments to include it.");
-			} else if(this.fieldIndexes.length > 22) {
+			} else if (this.fieldIndexes.length > 22) {
 				throw new IllegalArgumentException("You may select only up to twenty-two (22) fields.");
 			}
-			
-			if(isTuple) {
+
+			if (isTuple) {
 				this.isFieldInFirst = new boolean[this.fieldIndexes.length];
-				
+
 				// check field indexes and adapt to position in tuple
 				int maxFieldIndex = firstInput ? numFieldsDs1 : numFieldsDs2;
-				for(int i=0; i<this.fieldIndexes.length; i++) {
+				for (int i = 0; i < this.fieldIndexes.length; i++) {
 					Preconditions.checkElementIndex(this.fieldIndexes[i], maxFieldIndex);
 
-					if(firstInput) {
+					if (firstInput) {
 						this.isFieldInFirst[i] = true;
 					} else {
 						this.isFieldInFirst[i] = false;
@@ -1135,48 +1170,49 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 			}
 
 		}
-		
+
 		/**
-		 * Continues a ProjectJoin transformation and adds fields of the first join input.<br>
-		 * If the first join input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
-		 * If the first join input is not a Tuple DataSet, no parameters should be passed.<br>
-		 * 
-		 * Fields of the first and second input can be added by chaining the method calls of
-		 * {@link org.apache.flink.api.java.operators.JoinOperator.JoinProjection#projectFirst(int...)} and 
+		 * Continues a ProjectJoin transformation and adds fields of the first join input.
+		 *
+		 * <p>If the first join input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
+		 * If the first join input is not a Tuple DataSet, no parameters should be passed.
+		 *
+		 * <p>Fields of the first and second input can be added by chaining the method calls of
+		 * {@link org.apache.flink.api.java.operators.JoinOperator.JoinProjection#projectFirst(int...)} and
 		 * {@link org.apache.flink.api.java.operators.JoinOperator.JoinProjection#projectSecond(int...)}.
-		 * 
+		 *
 		 * @param firstFieldIndexes If the first input is a Tuple DataSet, the indexes of the selected fields.
 		 * 					   For a non-Tuple DataSet, do not provide parameters.
 		 * 					   The order of fields in the output tuple is defined by the order of field indexes.
 		 * @return An extended JoinProjection.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
 		protected JoinProjection<I1, I2> projectFirst(int... firstFieldIndexes) {
-			
+
 			boolean isFirstTuple;
 
 			isFirstTuple = ds1.getType() instanceof TupleTypeInfo && firstFieldIndexes.length > 0;
-			
-			if(!isFirstTuple && firstFieldIndexes.length != 0) {
+
+			if (!isFirstTuple && firstFieldIndexes.length != 0) {
 				// field index provided for non-Tuple input
 				throw new IllegalArgumentException("Input is not a Tuple. Call projectFirst() without arguments to include it.");
-			} else if(firstFieldIndexes.length > (22 - this.fieldIndexes.length)) {
+			} else if (firstFieldIndexes.length > (22 - this.fieldIndexes.length)) {
 				// too many field indexes provided
 				throw new IllegalArgumentException("You may select only up to twenty-two (22) fields in total.");
 			}
-			
+
 			int offset = this.fieldIndexes.length;
-			
-			if(isFirstTuple) {
+
+			if (isFirstTuple) {
 				// extend index and flag arrays
 				this.fieldIndexes = Arrays.copyOf(this.fieldIndexes, this.fieldIndexes.length + firstFieldIndexes.length);
 				this.isFieldInFirst = Arrays.copyOf(this.isFieldInFirst, this.isFieldInFirst.length + firstFieldIndexes.length);
-				
+
 				// copy field indexes
 				int maxFieldIndex = numFieldsDs1;
-				for(int i = 0; i < firstFieldIndexes.length; i++) {
+				for (int i = 0; i < firstFieldIndexes.length; i++) {
 					// check if indexes in range
 					Preconditions.checkElementIndex(firstFieldIndexes[i], maxFieldIndex);
 
@@ -1187,59 +1223,60 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 				// extend index and flag arrays
 				this.fieldIndexes = Arrays.copyOf(this.fieldIndexes, this.fieldIndexes.length + 1);
 				this.isFieldInFirst = Arrays.copyOf(this.isFieldInFirst, this.isFieldInFirst.length + 1);
-				
+
 				// add input object to output tuple
 				this.isFieldInFirst[offset] = true;
 				this.fieldIndexes[offset] = -1;
 			}
-			
+
 			return this;
 		}
-		
+
 		/**
-		 * Continues a ProjectJoin transformation and adds fields of the second join input.<br>
-		 * If the second join input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
-		 * If the second join input is not a Tuple DataSet, no parameters should be passed.<br>
-		 * 
-		 * Fields of the first and second input can be added by chaining the method calls of
+		 * Continues a ProjectJoin transformation and adds fields of the second join input.
+		 *
+		 * <p>If the second join input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
+		 * If the second join input is not a Tuple DataSet, no parameters should be passed.
+		 *
+		 * <p>Fields of the first and second input can be added by chaining the method calls of
 		 * {@link org.apache.flink.api.java.operators.JoinOperator.JoinProjection#projectFirst(int...)} and
 		 * {@link org.apache.flink.api.java.operators.JoinOperator.JoinProjection#projectSecond(int...)}.
-		 * 
-		 * @param secondFieldIndexes If the second input is a Tuple DataSet, the indexes of the selected fields. 
+		 *
+		 * @param secondFieldIndexes If the second input is a Tuple DataSet, the indexes of the selected fields.
 		 * 					   For a non-Tuple DataSet, do not provide parameters.
 		 * 					   The order of fields in the output tuple is defined by the order of field indexes.
 		 * @return An extended JoinProjection.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
 		protected JoinProjection<I1, I2> projectSecond(int... secondFieldIndexes) {
-			
+
 			boolean isSecondTuple;
 
 			isSecondTuple = ds2.getType() instanceof TupleTypeInfo && secondFieldIndexes.length > 0;
-			
-			if(!isSecondTuple && secondFieldIndexes.length != 0) {
+
+			if (!isSecondTuple && secondFieldIndexes.length != 0) {
 				// field index provided for non-Tuple input
 				throw new IllegalArgumentException("Input is not a Tuple. Call projectSecond() without arguments to include it.");
-			} else if(secondFieldIndexes.length > (22 - this.fieldIndexes.length)) {
+			} else if (secondFieldIndexes.length > (22 - this.fieldIndexes.length)) {
 				// too many field indexes provided
 				throw new IllegalArgumentException("You may select only up to twenty-two (22) fields in total.");
 			}
-			
+
 			int offset = this.fieldIndexes.length;
-			
-			if(isSecondTuple) {
+
+			if (isSecondTuple) {
 				// extend index and flag arrays
 				this.fieldIndexes = Arrays.copyOf(this.fieldIndexes, this.fieldIndexes.length + secondFieldIndexes.length);
 				this.isFieldInFirst = Arrays.copyOf(this.isFieldInFirst, this.isFieldInFirst.length + secondFieldIndexes.length);
-				
+
 				// copy field indexes
 				int maxFieldIndex = numFieldsDs2;
-				for(int i = 0; i < secondFieldIndexes.length; i++) {
+				for (int i = 0; i < secondFieldIndexes.length; i++) {
 					// check if indexes in range
 					Preconditions.checkElementIndex(secondFieldIndexes[i], maxFieldIndex);
-					
+
 					this.isFieldInFirst[offset + i] = false;
 					this.fieldIndexes[offset + i] = secondFieldIndexes[i];
 				}
@@ -1247,27 +1284,27 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 				// extend index and flag arrays
 				this.fieldIndexes = Arrays.copyOf(this.fieldIndexes, this.fieldIndexes.length + 1);
 				this.isFieldInFirst = Arrays.copyOf(this.isFieldInFirst, this.isFieldInFirst.length + 1);
-				
+
 				// add input object to output tuple
 				this.isFieldInFirst[offset] = false;
 				this.fieldIndexes[offset] = -1;
 			}
-			
+
 			return this;
 		}
-		
-		// --------------------------------------------------------------------------------------------	
+
+		// --------------------------------------------------------------------------------------------
 		// The following lines are generated.
-		// --------------------------------------------------------------------------------------------	
-		// BEGIN_OF_TUPLE_DEPENDENT_CODE	
+		// --------------------------------------------------------------------------------------------
+		// BEGIN_OF_TUPLE_DEPENDENT_CODE
 	// GENERATED FROM org.apache.flink.api.java.tuple.TupleGenerator.
 
 		/**
 		 * Chooses a projectTupleX according to the length of
-		 * {@link org.apache.flink.api.java.operators.JoinOperator.JoinProjection#fieldIndexes}
-		 * 
+		 * {@link org.apache.flink.api.java.operators.JoinOperator.JoinProjection#fieldIndexes}.
+		 *
 		 * @return The projected DataSet.
-		 * 
+		 *
 		 * @see org.apache.flink.api.java.operators.JoinOperator.ProjectJoin
 		 */
 		@SuppressWarnings("unchecked")
@@ -1307,11 +1344,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1323,11 +1360,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1339,11 +1376,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1355,11 +1392,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1371,11 +1408,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1387,11 +1424,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1403,11 +1440,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1419,11 +1456,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1435,11 +1472,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1451,11 +1488,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1467,11 +1504,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1483,11 +1520,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1499,11 +1536,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1515,11 +1552,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1531,11 +1568,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1547,11 +1584,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1563,11 +1600,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1579,11 +1616,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1595,11 +1632,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1611,11 +1648,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1627,11 +1664,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1643,11 +1680,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1659,11 +1696,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1675,11 +1712,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1691,11 +1728,11 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 		}
 
 		/**
-		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields. 
-		 * Requires the classes of the fields of the resulting tuples. 
-		 * 
+		 * Projects a pair of joined elements to a {@link Tuple} with the previously selected fields.
+		 * Requires the classes of the fields of the resulting tuples.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1708,23 +1745,23 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 
 		// END_OF_TUPLE_DEPENDENT_CODE
 		// -----------------------------------------------------------------------------------------
-		
+
 		private TypeInformation<?>[] extractFieldTypes(int[] fields) {
-			
+
 			TypeInformation<?>[] fieldTypes = new TypeInformation[fields.length];
 
-			for(int i=0; i<fields.length; i++) {
-				
+			for (int i = 0; i < fields.length; i++) {
+
 				TypeInformation<?> typeInfo;
-				if(isFieldInFirst[i]) {
-					if(fields[i] >= 0) {
-						typeInfo = ((TupleTypeInfo<?>)ds1.getType()).getTypeAt(fields[i]);
+				if (isFieldInFirst[i]) {
+					if (fields[i] >= 0) {
+						typeInfo = ((TupleTypeInfo<?>) ds1.getType()).getTypeAt(fields[i]);
 					} else {
 						typeInfo = ds1.getType();
 					}
 				} else {
-					if(fields[i] >= 0) {
-						typeInfo = ((TupleTypeInfo<?>)ds2.getType()).getTypeAt(fields[i]);
+					if (fields[i] >= 0) {
+						typeInfo = ((TupleTypeInfo<?>) ds2.getType()).getTypeAt(fields[i]);
 					} else {
 						typeInfo = ds2.getType();
 					}
@@ -1732,9 +1769,9 @@ public abstract class JoinOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1,
 
 				fieldTypes[i] = typeInfo;
 			}
-			
+
 			return fieldTypes;
 		}
-				
+
 	}
 }

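For context — not part of this commit — a minimal sketch of the projection API whose Javadoc is reflowed above. The input data sets, field indexes, and tuple types are illustrative assumptions:

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;

public class JoinProjectionSketch {
	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		DataSet<Tuple2<Integer, String>> users = env.fromElements(
				Tuple2.of(1, "alice"), Tuple2.of(2, "bob"));
		DataSet<Tuple2<Integer, Double>> scores = env.fromElements(
				Tuple2.of(1, 9.5), Tuple2.of(2, 7.0));

		// Join on the first field of both inputs, then project the joined
		// pair to a Tuple3 assembled from the previously selected fields.
		DataSet<Tuple3<Integer, String, Double>> result = users
				.join(scores)
				.where(0)
				.equalTo(0)
				.projectFirst(0, 1)
				.projectSecond(1);

		result.print();
	}
}
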
http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/KeyFunctions.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/KeyFunctions.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/KeyFunctions.java
index 057048c..f6336cd 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/KeyFunctions.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/KeyFunctions.java
@@ -20,10 +20,10 @@ package org.apache.flink.api.java.operators;
 
 import org.apache.flink.annotation.Internal;
 import org.apache.flink.api.common.functions.MapFunction;
+import org.apache.flink.api.common.operators.Keys.SelectorFunctionKeys;
 import org.apache.flink.api.common.operators.UnaryOperatorInformation;
 import org.apache.flink.api.common.operators.base.MapOperatorBase;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
-import org.apache.flink.api.common.operators.Keys.SelectorFunctionKeys;
 import org.apache.flink.api.java.operators.translation.KeyExtractingMapper;
 import org.apache.flink.api.java.operators.translation.KeyRemovingMapper;
 import org.apache.flink.api.java.operators.translation.TwoKeyExtractingMapper;
@@ -41,8 +41,7 @@ public class KeyFunctions {
 	@SuppressWarnings("unchecked")
 	public static <T, K> org.apache.flink.api.common.operators.Operator<Tuple2<K, T>> appendKeyExtractor(
 			org.apache.flink.api.common.operators.Operator<T> input,
-			SelectorFunctionKeys<T, K> key)
-	{
+			SelectorFunctionKeys<T, K> key) {
 
 		TypeInformation<T> inputType = key.getInputType();
 		TypeInformation<Tuple2<K, T>> typeInfoWithKey = createTypeWithKey(key);
@@ -65,8 +64,7 @@ public class KeyFunctions {
 	public static <T, K1, K2> org.apache.flink.api.common.operators.Operator<Tuple3<K1, K2, T>> appendKeyExtractor(
 			org.apache.flink.api.common.operators.Operator<T> input,
 			SelectorFunctionKeys<T, K1> key1,
-			SelectorFunctionKeys<T, K2> key2)
-	{
+			SelectorFunctionKeys<T, K2> key2) {
 
 		TypeInformation<T> inputType = key1.getInputType();
 		TypeInformation<Tuple3<K1, K2, T>> typeInfoWithKey = createTypeWithKey(key1, key2);
@@ -88,8 +86,7 @@ public class KeyFunctions {
 
 	public static <T, K> org.apache.flink.api.common.operators.SingleInputOperator<?, T, ?> appendKeyRemover(
 			org.apache.flink.api.common.operators.Operator<Tuple2<K, T>> inputWithKey,
-			SelectorFunctionKeys<T, K> key)
-	{
+			SelectorFunctionKeys<T, K> key) {
 
 		TypeInformation<T> inputType = key.getInputType();
 		TypeInformation<Tuple2<K, T>> typeInfoWithKey = createTypeWithKey(key);
@@ -107,15 +104,13 @@ public class KeyFunctions {
 	}
 
 	public static <T, K> TypeInformation<Tuple2<K, T>> createTypeWithKey(
-			SelectorFunctionKeys<T, K> key)
-	{
+			SelectorFunctionKeys<T, K> key) {
 		return new TupleTypeInfo<>(key.getKeyType(), key.getInputType());
 	}
 
 	public static <T, K1, K2> TypeInformation<Tuple3<K1, K2, T>> createTypeWithKey(
 			SelectorFunctionKeys<T, K1> key1,
-			SelectorFunctionKeys<T, K2> key2)
-	{
+			SelectorFunctionKeys<T, K2> key2) {
 		return new TupleTypeInfo<>(key1.getKeyType(), key2.getKeyType(), key1.getInputType());
 	}
 }

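KeyFunctions is internal plumbing: appendKeyExtractor wraps each element T into a Tuple2<K, T> that carries its extracted key, and appendKeyRemover strips the key again after the keyed operation. A hedged sketch of the user-facing path that exercises it — grouping by a KeySelector (data and names are illustrative):

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.functions.KeySelector;

public class KeySelectorGroupingSketch {
	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
		DataSet<String> words = env.fromElements("flink", "fast", "java", "join");

		// During translation the selector key is appended, the grouping runs
		// on Tuple2<String, String>, and the key is removed again afterwards.
		words.groupBy(new KeySelector<String, String>() {
			@Override
			public String getKey(String value) {
				return value.substring(0, 1);
			}
		}).first(1).print();
	}
}
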
http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/MapOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/MapOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/MapOperator.java
index 2f398fb..a9d5672 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/MapOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/MapOperator.java
@@ -29,52 +29,52 @@ import org.apache.flink.api.java.DataSet;
 /**
  * This operator represents the application of a "map" function on a data set, and the
  * result data set produced by the function.
- * 
+ *
  * @param <IN> The type of the data set consumed by the operator.
  * @param <OUT> The type of the data set created by the operator.
- * 
+ *
  * @see org.apache.flink.api.common.functions.MapFunction
  */
 @Public
 public class MapOperator<IN, OUT> extends SingleInputUdfOperator<IN, OUT, MapOperator<IN, OUT>> {
-	
+
 	protected final MapFunction<IN, OUT> function;
-	
+
 	protected final String defaultName;
 
 	public MapOperator(DataSet<IN> input, TypeInformation<OUT> resultType, MapFunction<IN, OUT> function, String defaultName) {
 		super(input, resultType);
-		
+
 		this.defaultName = defaultName;
 		this.function = function;
 
 		UdfOperatorUtils.analyzeSingleInputUdf(this, MapFunction.class, defaultName, function, null);
 	}
-	
+
 	@Override
 	protected MapFunction<IN, OUT> getFunction() {
 		return function;
 	}
-	
+
 	@Override
 	protected MapOperatorBase<IN, OUT, MapFunction<IN, OUT>> translateToDataFlow(Operator<IN> input) {
-		
-		String name = getName() != null ? getName() : "Map at "+defaultName;
+
+		String name = getName() != null ? getName() : "Map at " + defaultName;
 		// create operator
 		MapOperatorBase<IN, OUT, MapFunction<IN, OUT>> po = new MapOperatorBase<IN, OUT, MapFunction<IN, OUT>>(function,
 				new UnaryOperatorInformation<IN, OUT>(getInputType(), getResultType()), name);
 		// set input
 		po.setInput(input);
 		// set parallelism
-		if(this.getParallelism() > 0) {
+		if (this.getParallelism() > 0) {
 			// use specified parallelism
 			po.setParallelism(this.getParallelism());
 		} else {
 			// if no parallelism has been specified, use parallelism of input operator to enable chaining
 			po.setParallelism(input.getParallelism());
 		}
-		
+
 		return po;
 	}
-	
+
 }

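As translateToDataFlow above shows, a map without an explicit parallelism inherits the parallelism of its input so the two operators can be chained. A small usage sketch (data and names are illustrative):

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;

public class MapOperatorSketch {
	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
		DataSet<Integer> numbers = env.fromElements(1, 2, 3, 4);

		// No setParallelism() call here, so the operator created in
		// translateToDataFlow() takes the parallelism of its input.
		DataSet<Integer> doubled = numbers.map(new MapFunction<Integer, Integer>() {
			@Override
			public Integer map(Integer value) {
				return value * 2;
			}
		}).name("Doubler");

		doubled.print();
	}
}
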
http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/MapPartitionOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/MapPartitionOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/MapPartitionOperator.java
index 6bc48b8..e03f39d 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/MapPartitionOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/MapPartitionOperator.java
@@ -29,48 +29,48 @@ import org.apache.flink.api.java.DataSet;
 /**
  * This operator represents the application of a "mapPartition" function on a data set, and the
  * result data set produced by the function.
- * 
+ *
  * @param <IN> The type of the data set consumed by the operator.
  * @param <OUT> The type of the data set created by the operator.
- * 
+ *
  * @see MapPartitionFunction
  */
 @Public
 public class MapPartitionOperator<IN, OUT> extends SingleInputUdfOperator<IN, OUT, MapPartitionOperator<IN, OUT>> {
-	
+
 	protected final MapPartitionFunction<IN, OUT> function;
-	
+
 	protected final String defaultName;
-	
+
 	public MapPartitionOperator(DataSet<IN> input, TypeInformation<OUT> resultType, MapPartitionFunction<IN, OUT> function, String defaultName) {
 		super(input, resultType);
-		
+
 		this.function = function;
 		this.defaultName = defaultName;
 	}
-	
+
 	@Override
 	protected MapPartitionFunction<IN, OUT> getFunction() {
 		return function;
 	}
-	
+
 	@Override
 	protected MapPartitionOperatorBase<IN, OUT, MapPartitionFunction<IN, OUT>> translateToDataFlow(Operator<IN> input) {
-		
-		String name = getName() != null ? getName() : "MapPartition at "+defaultName;
+
+		String name = getName() != null ? getName() : "MapPartition at " + defaultName;
 		// create operator
 		MapPartitionOperatorBase<IN, OUT, MapPartitionFunction<IN, OUT>> po = new MapPartitionOperatorBase<IN, OUT, MapPartitionFunction<IN, OUT>>(function, new UnaryOperatorInformation<IN, OUT>(getInputType(), getResultType()), name);
 		// set input
 		po.setInput(input);
 		// set parallelism
-		if(this.getParallelism() > 0) {
+		if (this.getParallelism() > 0) {
 			// use specified parallelism
 			po.setParallelism(this.getParallelism());
 		} else {
 			// if no parallelism has been specified, use parallelism of input operator to enable chaining
 			po.setParallelism(input.getParallelism());
 		}
-		
+
 		return po;
 	}
 }

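For reference, a hedged example of the mapPartition API this operator backs — the function is invoked once per partition rather than once per element (values are illustrative):

import org.apache.flink.api.common.functions.MapPartitionFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.util.Collector;

public class MapPartitionSketch {
	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
		DataSet<Long> values = env.generateSequence(1, 100);

		// Emit one count per partition instead of one call per element.
		values.mapPartition(new MapPartitionFunction<Long, Long>() {
			@Override
			public void mapPartition(Iterable<Long> in, Collector<Long> out) {
				long count = 0;
				for (Long ignored : in) {
					count++;
				}
				out.collect(count);
			}
		}).print();
	}
}
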
http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/Operator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/Operator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/Operator.java
index e496c62..463dc35 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/Operator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/Operator.java
@@ -28,7 +28,7 @@ import org.apache.flink.util.Preconditions;
 
 /**
  * Base class of all operators in the Java API.
- * 
+ *
  * @param <OUT> The type of the data set produced by this operator.
  * @param <O> The type of the operator, so that we can return it.
  */
@@ -36,21 +36,20 @@ import org.apache.flink.util.Preconditions;
 public abstract class Operator<OUT, O extends Operator<OUT, O>> extends DataSet<OUT> {
 
 	protected String name;
-	
+
 	protected int parallelism = ExecutionConfig.PARALLELISM_DEFAULT;
 
 	protected ResourceSpec minResources = ResourceSpec.DEFAULT;
 
 	protected ResourceSpec preferredResources = ResourceSpec.DEFAULT;
 
-
 	protected Operator(ExecutionEnvironment context, TypeInformation<OUT> resultType) {
 		super(context, resultType);
 	}
-	
+
 	/**
 	 * Returns the type of the result of this operator.
-	 * 
+	 *
 	 * @return The result type of the operator.
 	 */
 	public TypeInformation<OUT> getResultType() {
@@ -60,16 +59,16 @@ public abstract class Operator<OUT, O extends Operator<OUT, O>> extends DataSet<
 	/**
 	 * Returns the name of the operator. If no name has been set, it returns the name of the
 	 * operation, or the name of the class implementing the function of this operator.
-	 * 
+	 *
 	 * @return The name of the operator.
 	 */
 	public String getName() {
 		return name;
 	}
-	
+
 	/**
 	 * Returns the parallelism of this operator.
-	 * 
+	 *
 	 * @return The parallelism of this operator.
 	 */
 	public int getParallelism() {
@@ -100,7 +99,7 @@ public abstract class Operator<OUT, O extends Operator<OUT, O>> extends DataSet<
 	 * Sets the name of this operator. This overrides the default name, which is either
 	 * a generated description of the operation (such as for example "Aggregate(1:SUM, 2:MIN)")
 	 * or the name the user-defined function or input/output format executed by the operator.
-	 * 
+	 *
 	 * @param newName The name for this operator.
 	 * @return The operator with a new name.
 	 */
@@ -110,11 +109,11 @@ public abstract class Operator<OUT, O extends Operator<OUT, O>> extends DataSet<
 		O returnType = (O) this;
 		return returnType;
 	}
-	
+
 	/**
 	 * Sets the parallelism for this operator.
 	 * The parallelism must be 1 or more.
-	 * 
+	 *
 	 * @param parallelism The parallelism for this operator. A value equal to {@link ExecutionConfig#PARALLELISM_DEFAULT}
 	 *        will use the system default.
 	 * @return The operator with set parallelism.

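The setters documented above return the concrete operator type O, which is what makes fluent configuration compile. A minimal sketch under that reading (data and names are illustrative):

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;

public class OperatorConfigSketch {
	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		// name() and setParallelism() both return the operator itself, so the
		// calls chain and still yield a DataSet<String>.
		DataSet<String> upper = env.fromElements("a", "b")
				.map(new MapFunction<String, String>() {
					@Override
					public String map(String value) {
						return value.toUpperCase();
					}
				})
				.name("ToUpper")
				.setParallelism(2);

		upper.print();
	}
}
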
http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/OperatorTranslation.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/OperatorTranslation.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/OperatorTranslation.java
index 22b9186..facadc0 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/OperatorTranslation.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/OperatorTranslation.java
@@ -36,31 +36,32 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+/**
+ * Used for translating data sets into corresponding operators.
+ */
 @Internal
 public class OperatorTranslation {
-	
-	/** The already translated operations */
+
+	/** The already translated operations. */
 	private Map<DataSet<?>, Operator<?>> translated = new HashMap<>();
-	
-	
+
 	public Plan translateToPlan(List<DataSink<?>> sinks, String jobName) {
 		List<GenericDataSinkBase<?>> planSinks = new ArrayList<>();
-		
+
 		for (DataSink<?> sink : sinks) {
 			planSinks.add(translate(sink));
 		}
-		
+
 		Plan p = new Plan(planSinks);
 		p.setJobName(jobName);
 		return p;
 	}
-	
-	
+
 	private <T> GenericDataSinkBase<T> translate(DataSink<T> sink) {
-		
+
 		// translate the input recursively
 		Operator<T> input = translate(sink.getDataSet());
-		
+
 		// translate the sink itself and connect it to the input
 		GenericDataSinkBase<T> translatedSink = sink.translateToDataFlow(input);
 
@@ -68,8 +69,7 @@ public class OperatorTranslation {
 
 		return translatedSink;
 	}
-	
-	
+
 	private <T> Operator<T> translate(DataSet<T> dataSet) {
 		while (dataSet instanceof NoOpOperator) {
 			dataSet = ((NoOpOperator<T>) dataSet).getInput();
@@ -89,9 +89,9 @@ public class OperatorTranslation {
 				return typedPrevious;
 			}
 		}
-		
+
 		Operator<T> dataFlowOp;
-		
+
 		if (dataSet instanceof DataSource) {
 			DataSource<T> dataSource = (DataSource<T>) dataSet;
 			dataFlowOp = dataSource.translateToDataFlow();
@@ -126,28 +126,27 @@ public class OperatorTranslation {
 		else {
 			throw new RuntimeException("Error while creating the data flow plan for the program: Unknown operator or data set type: " + dataSet);
 		}
-		
+
 		this.translated.put(dataSet, dataFlowOp);
-		
+
 		// take care of broadcast variables
 		translateBcVariables(dataSet, dataFlowOp);
-		
+
 		return dataFlowOp;
 	}
-	
-	
+
 	private <I, O> org.apache.flink.api.common.operators.Operator<O> translateSingleInputOperator(SingleInputOperator<?, ?, ?> op) {
-		
+
 		@SuppressWarnings("unchecked")
 		SingleInputOperator<I, O, ?> typedOp = (SingleInputOperator<I, O, ?>) op;
-		
+
 		@SuppressWarnings("unchecked")
 		DataSet<I> typedInput = (DataSet<I>) op.getInput();
-		
+
 		Operator<I> input = translate(typedInput);
-		
+
 		org.apache.flink.api.common.operators.Operator<O> dataFlowOp = typedOp.translateToDataFlow(input);
-		
+
 		if (op instanceof UdfOperator<?>) {
 			@SuppressWarnings("unchecked")
 			SingleInputUdfOperator<I, O, ?> udfOp = (SingleInputUdfOperator<I, O, ?>) op;
@@ -165,29 +164,29 @@ public class OperatorTranslation {
 				unaryOp.setSemanticProperties(udfOp.getSemanticProperties());
 			}
 		}
-		
+
 		return dataFlowOp;
 	}
-	
+
 	private <I1, I2, O> org.apache.flink.api.common.operators.Operator<O> translateTwoInputOperator(TwoInputOperator<?, ?, ?, ?> op) {
-		
+
 		@SuppressWarnings("unchecked")
 		TwoInputOperator<I1, I2, O, ?> typedOp = (TwoInputOperator<I1, I2, O, ?>) op;
-		
+
 		@SuppressWarnings("unchecked")
 		DataSet<I1> typedInput1 = (DataSet<I1>) op.getInput1();
 		@SuppressWarnings("unchecked")
 		DataSet<I2> typedInput2 = (DataSet<I2>) op.getInput2();
-		
+
 		Operator<I1> input1 = translate(typedInput1);
 		Operator<I2> input2 = translate(typedInput2);
-		
+
 		org.apache.flink.api.common.operators.Operator<O> dataFlowOp = typedOp.translateToDataFlow(input1, input2);
-		
-		if (op instanceof UdfOperator<?> ) {
+
+		if (op instanceof UdfOperator<?>) {
 			@SuppressWarnings("unchecked")
 			TwoInputUdfOperator<I1, I2, O, ?> udfOp = (TwoInputUdfOperator<I1, I2, O, ?>) op;
-			
+
 			// set configuration parameters
 			Configuration opParams = udfOp.getParameters();
 			if (opParams != null) {
@@ -201,16 +200,14 @@ public class OperatorTranslation {
 				binaryOp.setSemanticProperties(udfOp.getSemanticProperties());
 			}
 		}
-		
+
 		return dataFlowOp;
 	}
-	
-	
+
 	private <T> BulkIterationBase<T> translateBulkIteration(BulkIterationResultSet<?> untypedIterationEnd) {
 		@SuppressWarnings("unchecked")
 		BulkIterationResultSet<T> iterationEnd = (BulkIterationResultSet<T>) untypedIterationEnd;
 		IterativeDataSet<T> iterationHead = iterationEnd.getIterationHead();
-
 		BulkIterationBase<T> iterationOperator =
 				new BulkIterationBase<>(new UnaryOperatorInformation<>(iterationEnd.getType(), iterationEnd.getType()), "Bulk Iteration");
 
@@ -224,28 +221,28 @@ public class OperatorTranslation {
 		iterationOperator.setNextPartialSolution(translatedBody);
 		iterationOperator.setMaximumNumberOfIterations(iterationHead.getMaxIterations());
 		iterationOperator.setInput(translate(iterationHead.getInput()));
-		
+
 		iterationOperator.getAggregators().addAll(iterationHead.getAggregators());
-		
-		if(iterationEnd.getTerminationCriterion() != null) {
+
+		if (iterationEnd.getTerminationCriterion() != null) {
 			iterationOperator.setTerminationCriterion(translate(iterationEnd.getTerminationCriterion()));
 		}
 
 		return iterationOperator;
 	}
-	
+
 	private <D, W> DeltaIterationBase<D, W> translateDeltaIteration(DeltaIterationResultSet<?, ?> untypedIterationEnd) {
 		@SuppressWarnings("unchecked")
 		DeltaIterationResultSet<D, W> iterationEnd = (DeltaIterationResultSet<D, W>) untypedIterationEnd;
 		DeltaIteration<D, W> iterationHead = iterationEnd.getIterationHead();
-		
+
 		String name = iterationHead.getName() == null ? "Unnamed Delta Iteration" : iterationHead.getName();
-		
+
 		DeltaIterationBase<D, W> iterationOperator = new DeltaIterationBase<>(new BinaryOperatorInformation<>(iterationEnd.getType(), iterationEnd.getWorksetType(), iterationEnd.getType()),
 				iterationEnd.getKeyPositions(), name);
-		
+
 		iterationOperator.setMaximumNumberOfIterations(iterationEnd.getMaxIterations());
-		
+
 		if (iterationHead.getParallelism() > 0) {
 			iterationOperator.setParallelism(iterationHead.getParallelism());
 		}
@@ -258,31 +255,31 @@ public class OperatorTranslation {
 
 		Operator<D> translatedSolutionSet = translate(iterationEnd.getNextSolutionSet());
 		Operator<W> translatedWorkset = translate(iterationEnd.getNextWorkset());
-		
+
 		iterationOperator.setNextWorkset(translatedWorkset);
 		iterationOperator.setSolutionSetDelta(translatedSolutionSet);
 
 		iterationOperator.setInitialSolutionSet(translate(iterationHead.getInitialSolutionSet()));
 		iterationOperator.setInitialWorkset(translate(iterationHead.getInitialWorkset()));
-		
+
 		// register all aggregators
 		iterationOperator.getAggregators().addAll(iterationHead.getAggregators());
-		
+
 		iterationOperator.setSolutionSetUnManaged(iterationHead.isSolutionSetUnManaged());
-		
+
 		return iterationOperator;
 	}
-	
+
 	private void translateBcVariables(DataSet<?> setOrOp, Operator<?> dataFlowOp) {
 		// check if this is actually an operator that could have broadcast variables
 		if (setOrOp instanceof UdfOperator) {
 			if (!(dataFlowOp instanceof AbstractUdfOperator<?, ?>)) {
 				throw new RuntimeException("Error while creating the data flow plan for the program: A UDF operation was not translated to a UDF operator.");
 			}
-			
+
 			UdfOperator<?> udfOp = (UdfOperator<?>) setOrOp;
 			AbstractUdfOperator<?, ?> udfDataFlowOp = (AbstractUdfOperator<?, ?>) dataFlowOp;
-		
+
 			for (Map.Entry<String, DataSet<?>> bcVariable : udfOp.getBroadcastSets().entrySet()) {
 				Operator<?> bcInput = translate(bcVariable.getValue());
 				udfDataFlowOp.setBroadcastVariable(bcVariable.getKey(), bcInput);

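translateBulkIteration() above converts an IterativeDataSet and the data set that closes it into a BulkIterationBase. A hedged sketch of the program shape that reaches that code path (iteration count and values are illustrative):

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.IterativeDataSet;

public class BulkIterationSketch {
	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		// At most 10 supersteps; the mapped set becomes the next partial
		// solution that the translator wires into the BulkIterationBase.
		IterativeDataSet<Long> iteration = env.generateSequence(1, 5).iterate(10);

		DataSet<Long> nextPartialSolution = iteration.map(new MapFunction<Long, Long>() {
			@Override
			public Long map(Long value) {
				return value + 1;
			}
		});

		iteration.closeWith(nextPartialSolution).print();
	}
}
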
[5/6] flink git commit: [FLINK-7181] Activate checkstyle flink-java/operators/*

Posted by dw...@apache.org.
[FLINK-7181] Activate checkstyle flink-java/operators/*

This closes #4342.


Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/34e82f9d
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/34e82f9d
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/34e82f9d

Branch: refs/heads/master
Commit: 34e82f9de2e4253ef5ff01f4a7e2446808dfebfd
Parents: 8bee543
Author: Dawid Wysakowicz <dw...@apache.org>
Authored: Fri Jul 14 10:43:40 2017 +0200
Committer: Dawid Wysakowicz <dw...@apache.org>
Committed: Tue Jul 25 10:06:56 2017 +0200

----------------------------------------------------------------------
 .../api/java/operators/AggregateOperator.java   | 130 ++--
 .../java/operators/BulkIterationResultSet.java  |   7 +-
 .../api/java/operators/CoGroupOperator.java     | 269 +++----
 .../api/java/operators/CoGroupRawOperator.java  |  19 +-
 .../flink/api/java/operators/CrossOperator.java | 417 ++++++-----
 .../java/operators/CustomUnaryOperation.java    |   6 +-
 .../flink/api/java/operators/DataSink.java      |  84 +--
 .../flink/api/java/operators/DataSource.java    |  46 +-
 .../api/java/operators/DeltaIteration.java      |  97 ++-
 .../java/operators/DeltaIterationResultSet.java |  21 +-
 .../api/java/operators/DistinctOperator.java    |   7 +-
 .../api/java/operators/FilterOperator.java      |  26 +-
 .../api/java/operators/FlatMapOperator.java     |  22 +-
 .../java/operators/GroupCombineOperator.java    |  21 +-
 .../api/java/operators/GroupReduceOperator.java |  60 +-
 .../flink/api/java/operators/Grouping.java      |  30 +-
 .../api/java/operators/IterativeDataSet.java    |  53 +-
 .../flink/api/java/operators/JoinOperator.java  | 701 ++++++++++---------
 .../flink/api/java/operators/KeyFunctions.java  |  17 +-
 .../flink/api/java/operators/MapOperator.java   |  24 +-
 .../java/operators/MapPartitionOperator.java    |  24 +-
 .../flink/api/java/operators/Operator.java      |  21 +-
 .../api/java/operators/OperatorTranslation.java | 101 ++-
 .../api/java/operators/PartitionOperator.java   |  62 +-
 .../api/java/operators/ProjectOperator.java     | 231 +++---
 .../api/java/operators/ReduceOperator.java      |  67 +-
 .../api/java/operators/SingleInputOperator.java |  21 +-
 .../java/operators/SingleInputUdfOperator.java  |  86 +--
 .../java/operators/SortPartitionOperator.java   |   1 -
 .../api/java/operators/SortedGrouping.java      |  96 +--
 .../api/java/operators/TwoInputOperator.java    |  27 +-
 .../api/java/operators/TwoInputUdfOperator.java |  89 +--
 .../flink/api/java/operators/UdfOperator.java   |  33 +-
 .../api/java/operators/UdfOperatorUtils.java    |   3 +-
 .../flink/api/java/operators/UnionOperator.java |  20 +-
 .../api/java/operators/UnsortedGrouping.java    | 129 ++--
 .../operators/join/JoinOperatorSetsBase.java    |  55 +-
 .../flink/api/java/operators/join/JoinType.java |   3 +
 .../flink/api/java/operators/NamesTest.java     |  20 +-
 tools/maven/suppressions-java.xml               |   4 -
 40 files changed, 1598 insertions(+), 1552 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/AggregateOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/AggregateOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/AggregateOperator.java
index 26cf054..05bf909 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/AggregateOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/AggregateOperator.java
@@ -18,9 +18,6 @@
 
 package org.apache.flink.api.java.operators;
 
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.flink.annotation.Internal;
 import org.apache.flink.annotation.Public;
 import org.apache.flink.api.common.InvalidProgramException;
@@ -42,56 +39,57 @@ import org.apache.flink.configuration.Configuration;
 import org.apache.flink.util.Collector;
 import org.apache.flink.util.Preconditions;
 
+import java.util.ArrayList;
+import java.util.List;
+
 /**
 * This operator represents the application of an "aggregate" operation on a data set, and the
  * result data set produced by the function.
- * 
+ *
  * @param <IN> The type of the data set aggregated by the operator.
  */
 @Public
 public class AggregateOperator<IN> extends SingleInputOperator<IN, IN, AggregateOperator<IN>> {
-	
+
 	private final List<AggregationFunction<?>> aggregationFunctions = new ArrayList<>(4);
-	
+
 	private final List<Integer> fields = new ArrayList<>(4);
-	
+
 	private final Grouping<IN> grouping;
-	
+
 	private final String aggregateLocationName;
-	
+
 	/**
-	 * <p>
-	 * Non grouped aggregation
+	 * Non-grouped aggregation.
 	 */
 	public AggregateOperator(DataSet<IN> input, Aggregations function, int field, String aggregateLocationName) {
 		super(Preconditions.checkNotNull(input), input.getType());
 		Preconditions.checkNotNull(function);
-		
+
 		this.aggregateLocationName = aggregateLocationName;
-		
+
 		if (!input.getType().isTupleType()) {
 			throw new InvalidProgramException("Aggregating on field positions is only possible on tuple data types.");
 		}
-		
+
 		TupleTypeInfoBase<?> inType = (TupleTypeInfoBase<?>) input.getType();
-		
+
 		if (field < 0 || field >= inType.getArity()) {
 			throw new IllegalArgumentException("Aggregation field position is out of range.");
 		}
-		
+
 		AggregationFunctionFactory factory = function.getFactory();
 		AggregationFunction<?> aggFunct = factory.createAggregationFunction(inType.getTypeAt(field).getTypeClass());
-		
+
 		// this is the first aggregation operator after a regular data set (non grouped aggregation)
 		this.aggregationFunctions.add(aggFunct);
 		this.fields.add(field);
 		this.grouping = null;
 	}
-	
+
 	/**
-	 * 
-	 * Grouped aggregation
-	 * 
+	 * Grouped aggregation.
+	 *
 	 * @param input The grouped data set that is aggregated.
 	 * @param function The built-in aggregation function that is computed.
 	 * @param field The index of the Tuple field on which the aggregation function is applied.
@@ -99,49 +97,46 @@ public class AggregateOperator<IN> extends SingleInputOperator<IN, IN, Aggregate
 	public AggregateOperator(Grouping<IN> input, Aggregations function, int field, String aggregateLocationName) {
 		super(Preconditions.checkNotNull(input).getInputDataSet(), input.getInputDataSet().getType());
 		Preconditions.checkNotNull(function);
-		
+
 		this.aggregateLocationName = aggregateLocationName;
-		
+
 		if (!input.getInputDataSet().getType().isTupleType()) {
 			throw new InvalidProgramException("Aggregating on field positions is only possible on tuple data types.");
 		}
-		
+
 		TupleTypeInfoBase<?> inType = (TupleTypeInfoBase<?>) input.getInputDataSet().getType();
-		
+
 		if (field < 0 || field >= inType.getArity()) {
 			throw new IllegalArgumentException("Aggregation field position is out of range.");
 		}
-		
+
 		AggregationFunctionFactory factory = function.getFactory();
 		AggregationFunction<?> aggFunct = factory.createAggregationFunction(inType.getTypeAt(field).getTypeClass());
-		
+
 		// set the aggregation fields
 		this.aggregationFunctions.add(aggFunct);
 		this.fields.add(field);
 		this.grouping = input;
 	}
-	
-	
+
 	public AggregateOperator<IN> and(Aggregations function, int field) {
 		Preconditions.checkNotNull(function);
-		
+
 		TupleTypeInfoBase<?> inType = (TupleTypeInfoBase<?>) getType();
-		
+
 		if (field < 0 || field >= inType.getArity()) {
 			throw new IllegalArgumentException("Aggregation field position is out of range.");
 		}
-		
-		
+
 		AggregationFunctionFactory factory = function.getFactory();
 		AggregationFunction<?> aggFunct = factory.createAggregationFunction(inType.getTypeAt(field).getTypeClass());
-		
+
 		this.aggregationFunctions.add(aggFunct);
 		this.fields.add(field);
 
 		return this;
 	}
 
-
 	public AggregateOperator<IN> andSum (int field) {
 		return this.and(Aggregations.SUM, field);
 	}
@@ -154,68 +149,65 @@ public class AggregateOperator<IN> extends SingleInputOperator<IN, IN, Aggregate
 		return this.and(Aggregations.MAX, field);
 	}
 
-
 	@SuppressWarnings("unchecked")
 	@Override
 	@Internal
 	protected org.apache.flink.api.common.operators.base.GroupReduceOperatorBase<IN, IN, GroupReduceFunction<IN, IN>> translateToDataFlow(Operator<IN> input) {
-		
+
 		// sanity check
 		if (this.aggregationFunctions.isEmpty() || this.aggregationFunctions.size() != this.fields.size()) {
 			throw new IllegalStateException();
 		}
-		
+
 		// construct the aggregation function
 		AggregationFunction<Object>[] aggFunctions = new AggregationFunction[this.aggregationFunctions.size()];
 		int[] fields = new int[this.fields.size()];
 		StringBuilder genName = new StringBuilder();
-		
+
 		for (int i = 0; i < fields.length; i++) {
 			aggFunctions[i] = (AggregationFunction<Object>) this.aggregationFunctions.get(i);
 			fields[i] = this.fields.get(i);
-			
+
 			genName.append(aggFunctions[i].toString()).append('(').append(fields[i]).append(')').append(',');
 		}
 		genName.append(" at ").append(aggregateLocationName);
-		genName.setLength(genName.length()-1);
-		
-		
+		genName.setLength(genName.length() - 1);
+
 		@SuppressWarnings("rawtypes")
 		RichGroupReduceFunction<IN, IN> function = new AggregatingUdf(aggFunctions, fields);
-		
-		
+
 		String name = getName() != null ? getName() : genName.toString();
-		
+
 		// distinguish between grouped reduce and non-grouped reduce
 		if (this.grouping == null) {
 			// non grouped aggregation
 			UnaryOperatorInformation<IN, IN> operatorInfo = new UnaryOperatorInformation<>(getInputType(), getResultType());
 			GroupReduceOperatorBase<IN, IN, GroupReduceFunction<IN, IN>> po =
 					new GroupReduceOperatorBase<IN, IN, GroupReduceFunction<IN, IN>>(function, operatorInfo, new int[0], name);
-			
+
 			po.setCombinable(true);
-			
+
 			// set input
 			po.setInput(input);
 			// set parallelism
 			po.setParallelism(this.getParallelism());
-			
+
 			return po;
 		}
-		
+
 		if (this.grouping.getKeys() instanceof Keys.ExpressionKeys) {
 			// grouped aggregation
 			int[] logicalKeyPositions = this.grouping.getKeys().computeLogicalKeyPositions();
 			UnaryOperatorInformation<IN, IN> operatorInfo = new UnaryOperatorInformation<>(getInputType(), getResultType());
 			GroupReduceOperatorBase<IN, IN, GroupReduceFunction<IN, IN>> po =
 					new GroupReduceOperatorBase<IN, IN, GroupReduceFunction<IN, IN>>(function, operatorInfo, logicalKeyPositions, name);
-			
+
 			po.setCombinable(true);
-			
+
 			po.setInput(input);
 			po.setParallelism(this.getParallelism());
 			po.setCustomPartitioner(grouping.getCustomPartitioner());
-			
+
 			SingleInputSemanticProperties props = new SingleInputSemanticProperties();
 
 			for (int keyField : logicalKeyPositions) {
@@ -232,9 +224,9 @@ public class AggregateOperator<IN> extends SingleInputOperator<IN, IN, Aggregate
 					props.addForwardedField(keyField, keyField);
 				}
 			}
-			
+
 			po.setSemanticProperties(props);
-			
+
 			return po;
 		}
 		else if (this.grouping.getKeys() instanceof Keys.SelectorFunctionKeys) {
@@ -243,32 +235,30 @@ public class AggregateOperator<IN> extends SingleInputOperator<IN, IN, Aggregate
 		else {
 			throw new UnsupportedOperationException("Unrecognized key type.");
 		}
-		
+
 	}
-	
+
 	// --------------------------------------------------------------------------------------------
-	
+
 	@Internal
-	public static final class AggregatingUdf<T extends Tuple>
+	private static final class AggregatingUdf<T extends Tuple>
 		extends RichGroupReduceFunction<T, T>
 		implements GroupCombineFunction<T, T> {
 
 		private static final long serialVersionUID = 1L;
-		
+
 		private final int[] fieldPositions;
-		
+
 		private final AggregationFunction<Object>[] aggFunctions;
-		
-		
+
 		public AggregatingUdf(AggregationFunction<Object>[] aggFunctions, int[] fieldPositions) {
 			Preconditions.checkNotNull(aggFunctions);
 			Preconditions.checkNotNull(fieldPositions);
 			Preconditions.checkArgument(aggFunctions.length == fieldPositions.length);
-			
+
 			this.aggFunctions = aggFunctions;
 			this.fieldPositions = fieldPositions;
 		}
-		
 
 		@Override
 		public void open(Configuration parameters) throws Exception {
@@ -276,14 +266,14 @@ public class AggregateOperator<IN> extends SingleInputOperator<IN, IN, Aggregate
 				aggFunction.initializeAggregate();
 			}
 		}
-		
+
 		@Override
 		public void reduce(Iterable<T> records, Collector<T> out) {
 			final AggregationFunction<Object>[] aggFunctions = this.aggFunctions;
 			final int[] fieldPositions = this.fieldPositions;
 
 			// aggregators are initialized from before
-			
+
 			T outT = null;
 			for (T record : records) {
 				outT = record;
@@ -293,13 +283,13 @@ public class AggregateOperator<IN> extends SingleInputOperator<IN, IN, Aggregate
 					aggFunctions[i].aggregate(val);
 				}
 			}
-			
+
 			for (int i = 0; i < fieldPositions.length; i++) {
 				Object aggVal = aggFunctions[i].getAggregate();
 				outT.setField(aggVal, fieldPositions[i]);
 				aggFunctions[i].initializeAggregate();
 			}
-			
+
 			out.collect(outT);
 		}
 
@@ -307,6 +297,6 @@ public class AggregateOperator<IN> extends SingleInputOperator<IN, IN, Aggregate
 		public void combine(Iterable<T> records, Collector<T> out) {
 			reduce(records, out);
 		}
-		
+
 	}
 }

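A brief, hedged sketch of the aggregation chaining that the and()/andMax() methods above implement — the tuple layout and values are illustrative assumptions:

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.aggregation.Aggregations;
import org.apache.flink.api.java.tuple.Tuple3;

public class AggregateSketch {
	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
		DataSet<Tuple3<String, Integer, Double>> sales = env.fromElements(
				Tuple3.of("a", 3, 1.0), Tuple3.of("a", 5, 2.5), Tuple3.of("b", 2, 4.0));

		// One grouped AggregateOperator computing SUM on field 1 and MAX on
		// field 2; andMax() extends the same operator instead of adding a
		// second reduce.
		sales.groupBy(0)
				.aggregate(Aggregations.SUM, 1)
				.andMax(2)
				.print();
	}
}
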
http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/BulkIterationResultSet.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/BulkIterationResultSet.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/BulkIterationResultSet.java
index e6a4627..d1ca742 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/BulkIterationResultSet.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/BulkIterationResultSet.java
@@ -23,6 +23,10 @@ import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.ExecutionEnvironment;
 
+/**
+ * Resulting {@link DataSet} of bulk iterations.
+ * @param <T> type of the elements in the result set
+ */
 @Internal
 public class BulkIterationResultSet<T> extends DataSet<T> {
 
@@ -41,8 +45,7 @@ public class BulkIterationResultSet<T> extends DataSet<T> {
 
 	BulkIterationResultSet(ExecutionEnvironment context,
 		TypeInformation<T> type, IterativeDataSet<T> iterationHead,
-		DataSet<T> nextPartialSolution, DataSet<?> terminationCriterion)
-	{
+		DataSet<T> nextPartialSolution, DataSet<?> terminationCriterion) {
 		super(context, type);
 		this.iterationHead = iterationHead;
 		this.nextPartialSolution = nextPartialSolution;

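BulkIterationResultSet optionally carries a termination criterion, which the translation shown earlier registers via setTerminationCriterion(). A hedged sketch of the two-argument closeWith() that produces such a result set (values are illustrative):

import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.IterativeDataSet;

public class TerminationCriterionSketch {
	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		IterativeDataSet<Long> iteration = env.generateSequence(1, 1).iterate(100);

		DataSet<Long> next = iteration.map(new MapFunction<Long, Long>() {
			@Override
			public Long map(Long value) {
				return value + 1;
			}
		});

		// The iteration terminates early once this data set becomes empty,
		// i.e. as soon as no value below 10 remains.
		DataSet<Long> criterion = next.filter(new FilterFunction<Long>() {
			@Override
			public boolean filter(Long value) {
				return value < 10;
			}
		});

		iteration.closeWith(next, criterion).print();
	}
}
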
http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/CoGroupOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/CoGroupOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/CoGroupOperator.java
index 3c838cc..1d1ea8f 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/CoGroupOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/CoGroupOperator.java
@@ -18,13 +18,6 @@
 
 package org.apache.flink.api.java.operators;
 
-import java.security.InvalidParameterException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-import org.apache.commons.lang3.tuple.ImmutablePair;
-import org.apache.commons.lang3.tuple.Pair;
 import org.apache.flink.annotation.Internal;
 import org.apache.flink.annotation.Public;
 import org.apache.flink.api.common.InvalidProgramException;
@@ -33,6 +26,9 @@ import org.apache.flink.api.common.functions.Partitioner;
 import org.apache.flink.api.common.operators.BinaryOperatorInformation;
 import org.apache.flink.api.common.operators.DualInputSemanticProperties;
 import org.apache.flink.api.common.operators.Keys;
+import org.apache.flink.api.common.operators.Keys.ExpressionKeys;
+import org.apache.flink.api.common.operators.Keys.IncompatibleKeysException;
+import org.apache.flink.api.common.operators.Keys.SelectorFunctionKeys;
 import org.apache.flink.api.common.operators.Operator;
 import org.apache.flink.api.common.operators.Order;
 import org.apache.flink.api.common.operators.Ordering;
@@ -40,12 +36,9 @@ import org.apache.flink.api.common.operators.base.CoGroupOperatorBase;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.Utils;
+import org.apache.flink.api.java.functions.KeySelector;
 import org.apache.flink.api.java.functions.SemanticPropUtil;
 import org.apache.flink.api.java.operators.DeltaIteration.SolutionSetPlaceHolder;
-import org.apache.flink.api.java.functions.KeySelector;
-import org.apache.flink.api.common.operators.Keys.ExpressionKeys;
-import org.apache.flink.api.common.operators.Keys.IncompatibleKeysException;
-import org.apache.flink.api.common.operators.Keys.SelectorFunctionKeys;
 import org.apache.flink.api.java.operators.translation.PlanBothUnwrappingCoGroupOperator;
 import org.apache.flink.api.java.operators.translation.PlanLeftUnwrappingCoGroupOperator;
 import org.apache.flink.api.java.operators.translation.PlanRightUnwrappingCoGroupOperator;
@@ -53,13 +46,21 @@ import org.apache.flink.api.java.tuple.Tuple;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.api.java.typeutils.TypeExtractor;
 
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.apache.commons.lang3.tuple.Pair;
+
+import java.security.InvalidParameterException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
 /**
- * A {@link DataSet} that is the result of a CoGroup transformation. 
- * 
+ * A {@link DataSet} that is the result of a CoGroup transformation.
+ *
  * @param <I1> The type of the first input DataSet of the CoGroup transformation.
  * @param <I2> The type of the second input DataSet of the CoGroup transformation.
  * @param <OUT> The type of the result of the CoGroup transformation.
- * 
+ *
  * @see DataSet
  */
 @Public
@@ -69,33 +70,30 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 
 	private final Keys<I1> keys1;
 	private final Keys<I2> keys2;
-	
+
 	private final String defaultName;
-	
+
 	private final List<Pair<Integer, Order>> groupSortKeyOrderFirst;
 	private final List<Pair<Integer, Order>> groupSortKeyOrderSecond;
-	
-	private Partitioner<?> customPartitioner;
 
+	private Partitioner<?> customPartitioner;
 
 	public CoGroupOperator(DataSet<I1> input1, DataSet<I2> input2, Keys<I1> keys1, Keys<I2> keys2,
 							CoGroupFunction<I1, I2, OUT> function, TypeInformation<OUT> returnType,
-							Partitioner<?> customPartitioner, String defaultName)
-	{
+							Partitioner<?> customPartitioner, String defaultName) {
 		this(input1, input2, keys1, keys2, function, returnType, null, null, customPartitioner, defaultName);
 	}
-	
+
 	public CoGroupOperator(DataSet<I1> input1, DataSet<I2> input2, Keys<I1> keys1, Keys<I2> keys2,
 			CoGroupFunction<I1, I2, OUT> function, TypeInformation<OUT> returnType,
 			List<Pair<Integer, Order>> groupSortKeyOrderFirst, List<Pair<Integer, Order>> groupSortKeyOrderSecond,
-			Partitioner<?> customPartitioner, String defaultName)
-	{
+			Partitioner<?> customPartitioner, String defaultName) {
 		super(input1, input2, returnType);
 
 		this.function = function;
 		this.customPartitioner = customPartitioner;
 		this.defaultName = defaultName;
-		
+
 		this.groupSortKeyOrderFirst = groupSortKeyOrderFirst == null ? Collections.<Pair<Integer, Order>>emptyList() : groupSortKeyOrderFirst;
 		this.groupSortKeyOrderSecond = groupSortKeyOrderSecond == null ? Collections.<Pair<Integer, Order>>emptyList() : groupSortKeyOrderSecond;
 
@@ -126,7 +124,7 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 
 		UdfOperatorUtils.analyzeDualInputUdf(this, CoGroupFunction.class, defaultName, function, keys1, keys2);
 	}
-	
+
 	@Override
 	protected CoGroupFunction<I1, I2, OUT> getFunction() {
 		return function;
@@ -138,16 +136,16 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 		DualInputSemanticProperties props = super.getSemanticProperties();
 
 		// offset semantic information by extracted key fields
-		if(props != null &&
+		if (props != null &&
 					(this.keys1 instanceof SelectorFunctionKeys ||
 					this.keys2 instanceof SelectorFunctionKeys)) {
 
 			int numFields1 = this.getInput1Type().getTotalFields();
 			int numFields2 = this.getInput2Type().getTotalFields();
 			int offset1 = (this.keys1 instanceof SelectorFunctionKeys) ?
-					((SelectorFunctionKeys<?,?>) this.keys1).getKeyType().getTotalFields() : 0;
+					((SelectorFunctionKeys<?, ?>) this.keys1).getKeyType().getTotalFields() : 0;
 			int offset2 = (this.keys2 instanceof SelectorFunctionKeys) ?
-					((SelectorFunctionKeys<?,?>) this.keys2).getKeyType().getTotalFields() : 0;
+					((SelectorFunctionKeys<?, ?>) this.keys2).getKeyType().getTotalFields() : 0;
 
 			props = SemanticPropUtil.addSourceFieldOffsets(props, numFields1, numFields2, offset1, offset2);
 		}
@@ -164,14 +162,14 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 	protected Keys<I2> getKeys2() {
 		return this.keys2;
 	}
-	
+
 	/**
 	 * Sets a custom partitioner for the CoGroup operation. The partitioner will be called on the join keys to determine
 	 * the partition a key should be assigned to. The partitioner is evaluated on both inputs in the
 	 * same way.
-	 * <p>
-	 * NOTE: A custom partitioner can only be used with single-field CoGroup keys, not with composite CoGroup keys.
-	 * 
+	 *
+	 * <p>NOTE: A custom partitioner can only be used with single-field CoGroup keys, not with composite CoGroup keys.
+	 *
 	 * @param partitioner The custom partitioner to be used.
 	 * @return This CoGroup operator, to allow for function chaining.
 	 */
@@ -183,10 +181,10 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 		this.customPartitioner = getInput1().clean(partitioner);
 		return this;
 	}
-	
+
 	/**
 	 * Gets the custom partitioner used by this join, or {@code null}, if none is set.
-	 * 
+	 *
 	 * @return The custom partitioner used by this join.
 	 */
 	public Partitioner<?> getPartitioner() {
@@ -196,7 +194,7 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 	@Override
 	@Internal
 	protected org.apache.flink.api.common.operators.base.CoGroupOperatorBase<?, ?, OUT, ?> translateToDataFlow(Operator<I1> input1, Operator<I2> input2) {
-		
+
 		String name = getName() != null ? getName() : "CoGroup at " + defaultName;
 		try {
 			keys1.areCompatible(keys2);
@@ -204,7 +202,7 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 		catch (IncompatibleKeysException e) {
 			throw new InvalidProgramException("The types of the key fields do not match.", e);
 		}
-		
+
 		final org.apache.flink.api.common.operators.base.CoGroupOperatorBase<?, ?, OUT, ?> po;
 
 		if (keys1 instanceof SelectorFunctionKeys
@@ -246,8 +244,7 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 			po = translateSelectorFunctionCoGroupLeft(selectorKeys1, logicalKeyPositions2, function,
 							getInput2Type(), getResultType(), name, input1, input2);
 		}
-		else if ( keys1 instanceof Keys.ExpressionKeys && keys2 instanceof Keys.ExpressionKeys)
-			{
+		else if (keys1 instanceof Keys.ExpressionKeys && keys2 instanceof Keys.ExpressionKeys) {
 			try {
 				keys1.areCompatible(keys2);
 			} catch (IncompatibleKeysException e) {
@@ -256,12 +253,12 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 
 			int[] logicalKeyPositions1 = keys1.computeLogicalKeyPositions();
 			int[] logicalKeyPositions2 = keys2.computeLogicalKeyPositions();
-			
+
 			CoGroupOperatorBase<I1, I2, OUT, CoGroupFunction<I1, I2, OUT>> op =
 					new CoGroupOperatorBase<>(
 							function, new BinaryOperatorInformation<>(getInput1Type(), getInput2Type(), getResultType()),
 							logicalKeyPositions1, logicalKeyPositions2, name);
-			
+
 			op.setFirstInput(input1);
 			op.setSecondInput(input2);
 			po = op;
@@ -269,11 +266,11 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 		else {
 			throw new UnsupportedOperationException("Unrecognized or incompatible key types.");
 		}
-		
+
 		// configure shared characteristics
 		po.setParallelism(getParallelism());
 		po.setCustomPartitioner(customPartitioner);
-		
+
 		if (groupSortKeyOrderFirst.size() > 0) {
 			Ordering o = new Ordering();
 			for (Pair<Integer, Order> entry : groupSortKeyOrderFirst) {
@@ -288,17 +285,15 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 			}
 			po.setGroupOrderForInputTwo(o);
 		}
-		
+
 		return po;
 	}
 
-
 	private static <I1, I2, K, OUT> PlanBothUnwrappingCoGroupOperator<I1, I2, OUT, K> translateSelectorFunctionCoGroup(
 			SelectorFunctionKeys<I1, ?> rawKeys1, SelectorFunctionKeys<I2, ?> rawKeys2,
 			CoGroupFunction<I1, I2, OUT> function,
 			TypeInformation<OUT> outputType, String name,
-			Operator<I1> input1, Operator<I2> input2)
-	{
+			Operator<I1> input1, Operator<I2> input2) {
 		@SuppressWarnings("unchecked")
 		final SelectorFunctionKeys<I1, K> keys1 = (SelectorFunctionKeys<I1, K>) rawKeys1;
 		@SuppressWarnings("unchecked")
@@ -323,9 +318,8 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 			int[] logicalKeyPositions1, SelectorFunctionKeys<I2, ?> rawKeys2,
 			CoGroupFunction<I1, I2, OUT> function,
 			TypeInformation<I1> inputType1, TypeInformation<OUT> outputType, String name,
-			Operator<I1> input1, Operator<I2> input2)
-	{
-		if(!inputType1.isTupleType()) {
+			Operator<I1> input1, Operator<I2> input2) {
+		if (!inputType1.isTupleType()) {
 			throw new InvalidParameterException("Should not happen.");
 		}
 
@@ -333,7 +327,7 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 		final SelectorFunctionKeys<I2, K> keys2 = (SelectorFunctionKeys<I2, K>) rawKeys2;
 		final TypeInformation<Tuple2<K, I2>> typeInfoWithKey2 = KeyFunctions.createTypeWithKey(keys2);
 		final Operator<Tuple2<K, I2>> keyedInput2 = KeyFunctions.appendKeyExtractor(input2, keys2);
-		
+
 		final PlanRightUnwrappingCoGroupOperator<I1, I2, OUT, K> cogroup =
 				new PlanRightUnwrappingCoGroupOperator<>(
 						function,
@@ -354,9 +348,8 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 			SelectorFunctionKeys<I1, ?> rawKeys1, int[] logicalKeyPositions2,
 			CoGroupFunction<I1, I2, OUT> function,
 			TypeInformation<I2> inputType2, TypeInformation<OUT> outputType, String name,
-			Operator<I1> input1, Operator<I2> input2)
-	{
-		if(!inputType2.isTupleType()) {
+			Operator<I1> input1, Operator<I2> input2) {
+		if (!inputType2.isTupleType()) {
 			throw new InvalidParameterException("Should not happen.");
 		}
 
@@ -386,8 +379,9 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 	// --------------------------------------------------------------------------------------------
 
 	/**
-	 * Intermediate step of a CoGroup transformation. <br>
-	 * To continue the CoGroup transformation, select the grouping key of the first input {@link DataSet} by calling 
+	 * Intermediate step of a CoGroup transformation.
+	 *
+	 * <p>To continue the CoGroup transformation, select the grouping key of the first input {@link DataSet} by calling
 	 * {@link org.apache.flink.api.java.operators.CoGroupOperator.CoGroupOperatorSets#where(int...)} or {@link org.apache.flink.api.java.operators.CoGroupOperator.CoGroupOperatorSets#where(KeySelector)}.
 	 *
 	 * @param <I1> The type of the first input DataSet of the CoGroup transformation.
@@ -408,15 +402,17 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 		}
 
 		/**
-		 * Continues a CoGroup transformation. <br>
-		 * Defines the {@link Tuple} fields of the first co-grouped {@link DataSet} that should be used as grouping keys.<br>
-		 * <b>Note: Fields can only be selected as grouping keys on Tuple DataSets.</b><br>
+		 * Continues a CoGroup transformation.
+		 *
+		 * <p>Defines the {@link Tuple} fields of the first co-grouped {@link DataSet} that should be used as grouping keys.
+		 *
+		 * <p><b>Note: Fields can only be selected as grouping keys on Tuple DataSets.</b>
 		 *
 		 *
 		 * @param fields The indexes of the Tuple fields of the first co-grouped DataSets that should be used as keys.
 		 * @return An incomplete CoGroup transformation.
 		 *           Call {@link org.apache.flink.api.java.operators.CoGroupOperator.CoGroupOperatorSets.CoGroupOperatorSetsPredicate#equalTo(int...)} to continue the CoGroup.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -425,10 +421,10 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 		}
 
 		/**
-		 * Continues a CoGroup transformation. <br>
-		 * Defines the fields of the first co-grouped {@link DataSet} that should be used as grouping keys. Fields
-		 * are the names of member fields of the underlying type of the data set.
+		 * Continues a CoGroup transformation.
 		 *
+		 * <p>Defines the fields of the first co-grouped {@link DataSet} that should be used as grouping keys. Fields
+		 * are the names of member fields of the underlying type of the data set.
 		 *
 		 * @param fields The fields of the first co-grouped DataSet that should be used as keys.
 		 * @return An incomplete CoGroup transformation.
@@ -442,14 +438,15 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 		}
 
 		/**
-		 * Continues a CoGroup transformation and defines a {@link KeySelector} function for the first co-grouped {@link DataSet}.<br>
-		 * The KeySelector function is called for each element of the first DataSet and extracts a single 
-		 * key value on which the DataSet is grouped. <br>
-		 * 
+		 * Continues a CoGroup transformation and defines a {@link KeySelector} function for the first co-grouped {@link DataSet}.
+		 *
+		 * <p>The KeySelector function is called for each element of the first DataSet and extracts a single
+		 * key value on which the DataSet is grouped.
+		 *
 		 * @param keyExtractor The KeySelector function which extracts the key values from the DataSet on which it is grouped.
-		 * @return An incomplete CoGroup transformation. 
+		 * @return An incomplete CoGroup transformation.
 		 *           Call {@link org.apache.flink.api.java.operators.CoGroupOperator.CoGroupOperatorSets.CoGroupOperatorSetsPredicate#equalTo(int...)} to continue the CoGroup.
-		 * 
+		 *
 		 * @see KeySelector
 		 * @see DataSet
 		 */
@@ -461,8 +458,9 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 		// ----------------------------------------------------------------------------------------
 
 		/**
-		 * Intermediate step of a CoGroup transformation. <br>
-		 * To continue the CoGroup transformation, select the grouping key of the second input {@link DataSet} by calling 
+		 * Intermediate step of a CoGroup transformation.
+		 *
+		 * <p>To continue the CoGroup transformation, select the grouping key of the second input {@link DataSet} by calling
 		 * {@link org.apache.flink.api.java.operators.CoGroupOperator.CoGroupOperatorSets.CoGroupOperatorSetsPredicate#equalTo(int...)} or {@link org.apache.flink.api.java.operators.CoGroupOperator.CoGroupOperatorSets.CoGroupOperatorSetsPredicate#equalTo(KeySelector)}.
 		 *
 		 */
@@ -483,10 +481,10 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 			}
 
 			/**
-			 * Continues a CoGroup transformation and defines the {@link Tuple} fields of the second co-grouped 
-			 * {@link DataSet} that should be used as grouping keys.<br>
-			 * <b>Note: Fields can only be selected as grouping keys on Tuple DataSets.</b><br>
+			 * Continues a CoGroup transformation and defines the {@link Tuple} fields of the second co-grouped
+			 * {@link DataSet} that should be used as grouping keys.
 			 *
+			 * <p><b>Note: Fields can only be selected as grouping keys on Tuple DataSets.</b>
 			 *
 			 * @param fields The indexes of the Tuple fields of the second co-grouped DataSet that should be used as keys.
 			 * @return An incomplete CoGroup transformation.
@@ -498,8 +496,7 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 
 			/**
 			 * Continues a CoGroup transformation and defines the fields of the second co-grouped
-			 * {@link DataSet} that should be used as grouping keys.<br>
-			 *
+			 * {@link DataSet} that should be used as grouping keys.
 			 *
 			 * @param fields The fields of the second co-grouped DataSet that should be used as keys.
 			 * @return An incomplete CoGroup transformation.
@@ -510,12 +507,13 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 			}
 
 			/**
-			 * Continues a CoGroup transformation and defines a {@link KeySelector} function for the second co-grouped {@link DataSet}.<br>
-			 * The KeySelector function is called for each element of the second DataSet and extracts a single 
-			 * key value on which the DataSet is grouped. <br>
-			 * 
+			 * Continues a CoGroup transformation and defines a {@link KeySelector} function for the second co-grouped {@link DataSet}.
+			 *
+			 * <p>The KeySelector function is called for each element of the second DataSet and extracts a single
+			 * key value on which the DataSet is grouped.
+			 *
 			 * @param keyExtractor The KeySelector function which extracts the key values from the second DataSet on which it is grouped.
-			 * @return An incomplete CoGroup transformation. 
+			 * @return An incomplete CoGroup transformation.
 			 *           Call {@link org.apache.flink.api.java.operators.CoGroupOperator.CoGroupOperatorSets.CoGroupOperatorSetsPredicate.CoGroupOperatorWithoutFunction#with(org.apache.flink.api.common.functions.CoGroupFunction)} to finalize the CoGroup transformation.
 			 */
 			public <K> CoGroupOperatorWithoutFunction equalTo(KeySelector<I2, K> keyExtractor) {
@@ -524,8 +522,9 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 			}
 
 			/**
-			 * Intermediate step of a CoGroup transformation. <br>
-			 * To continue the CoGroup transformation, provide a {@link org.apache.flink.api.common.functions.RichCoGroupFunction} by calling
+			 * Intermediate step of a CoGroup transformation.
+			 *
+			 * <p>To continue the CoGroup transformation, provide a {@link org.apache.flink.api.common.functions.RichCoGroupFunction} by calling
 			 * {@link org.apache.flink.api.java.operators.CoGroupOperator.CoGroupOperatorSets.CoGroupOperatorSetsPredicate.CoGroupOperatorWithoutFunction#with(org.apache.flink.api.common.functions.CoGroupFunction)}.
 			 *
 			 */
@@ -539,22 +538,25 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 				}
 				try {
 					keys1.areCompatible(keys2);
-				} catch(IncompatibleKeysException ike) {
+				} catch (IncompatibleKeysException ike) {
 					throw new InvalidProgramException("The pair of co-group keys are not compatible with each other.", ike);
 				}
 
 				return new CoGroupOperatorWithoutFunction(keys2);
 			}
-			
+
 			// ------------------------------------------------------------------------------------
 
+			/**
+			 * An intermediate {@link CoGroupOperator} without a {@link CoGroupFunction} applied yet.
+			 */
 			public final class CoGroupOperatorWithoutFunction {
-				
+
 				private final Keys<I2> keys2;
-				
+
 				private final List<Pair<Integer, Order>> groupSortKeyOrderFirst;
 				private final List<Pair<Integer, Order>> groupSortKeyOrderSecond;
-				
+
 				private Partitioner<?> customPartitioner;
 
 				private CoGroupOperatorWithoutFunction(Keys<I2> keys2) {
@@ -566,18 +568,18 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 					}
 
 					this.keys2 = keys2;
-					
+
 					this.groupSortKeyOrderFirst = new ArrayList<>();
 					this.groupSortKeyOrderSecond = new ArrayList<>();
 				}
-				
+
 				/**
 				 * Sets a custom partitioner for the CoGroup operation. The partitioner will be called on the join keys to determine
 				 * the partition a key should be assigned to. The partitioner is evaluated on both inputs in the
 				 * same way.
-				 * <p>
-				 * NOTE: A custom partitioner can only be used with single-field CoGroup keys, not with composite CoGroup keys.
-				 * 
+				 *
+				 * <p>NOTE: A custom partitioner can only be used with single-field CoGroup keys, not with composite CoGroup keys.
+				 *
 				 * @param partitioner The custom partitioner to be used.
 				 * @return This CoGroup operator, to allow for function chaining.
 				 */
@@ -589,10 +591,10 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 					this.customPartitioner = input1.clean(partitioner);
 					return this;
 				}
-				
+
 				/**
 				 * Gets the custom partitioner used by this CoGroup operation, or {@code null}, if none is set.
-				 * 
+				 *
 				 * @return The custom partitioner used by this CoGroup operation, or {@code null}, if none is set.
 				 */
 				public Partitioner<?> getPartitioner() {
@@ -600,12 +602,13 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 				}
 
 				/**
-				 * Finalizes a CoGroup transformation by applying a {@link org.apache.flink.api.common.functions.RichCoGroupFunction} to groups of elements with identical keys.<br>
-				 * Each CoGroupFunction call returns an arbitrary number of keys. 
-				 * 
+				 * Finalizes a CoGroup transformation by applying a {@link org.apache.flink.api.common.functions.RichCoGroupFunction} to groups of elements with identical keys.
+				 *
+				 * <p>Each CoGroupFunction call returns an arbitrary number of elements.
+				 *
 				 * @param function The CoGroupFunction that is called for all groups of elements with identical keys.
 				 * @return A CoGroupOperator that represents the co-grouped result DataSet.
-				 * 
+				 *
 				 * @see org.apache.flink.api.common.functions.RichCoGroupFunction
 				 * @see DataSet
 				 */
@@ -615,26 +618,28 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 					}
 					TypeInformation<R> returnType = TypeExtractor.getCoGroupReturnTypes(function, input1.getType(), input2.getType(),
 							Utils.getCallLocationName(), true);
-					
+
 					return new CoGroupOperator<>(input1, input2, keys1, keys2, input1.clean(function), returnType,
 							groupSortKeyOrderFirst, groupSortKeyOrderSecond,
 							customPartitioner, Utils.getCallLocationName());
 				}
-				
+
 				// --------------------------------------------------------------------------------
 				//  Group Operations
 				// --------------------------------------------------------------------------------
-				
+
 				/**
 				 * Sorts {@link org.apache.flink.api.java.tuple.Tuple} elements within a group in the first input on the
-				 * specified field in the specified {@link Order}.<br>
-				 * <b>Note: Only groups of Tuple elements and Pojos can be sorted.</b><br>
-				 * Groups can be sorted by multiple fields by chaining {@link #sortFirstGroup(int, Order)} calls.
-				 * 
+				 * specified field in the specified {@link Order}.
+				 *
+				 * <p><b>Note: Only groups of Tuple elements and Pojos can be sorted.</b>
+				 *
+				 * <p>Groups can be sorted by multiple fields by chaining {@link #sortFirstGroup(int, Order)} calls.
+				 *
 				 * @param field The Tuple field on which the group is sorted.
 				 * @param order The Order in which the specified Tuple field is sorted.
 				 * @return This CoGroup transformation with the specified group order for the first input.
-				 * 
+				 *
 				 * @see org.apache.flink.api.java.tuple.Tuple
 				 * @see Order
 				 */
@@ -642,24 +647,26 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 
 					ExpressionKeys<I1> ek = new ExpressionKeys<>(field, input1.getType());
 					int[] groupOrderKeys = ek.computeLogicalKeyPositions();
-					
+
 					for (int key : groupOrderKeys) {
 						this.groupSortKeyOrderFirst.add(new ImmutablePair<>(key, order));
 					}
-					
+
 					return this;
 				}
-				
+
 				/**
 				 * Sorts {@link org.apache.flink.api.java.tuple.Tuple} elements within a group in the second input on the
-				 * specified field in the specified {@link Order}.<br>
-				 * <b>Note: Only groups of Tuple elements and Pojos can be sorted.</b><br>
-				 * Groups can be sorted by multiple fields by chaining {@link #sortSecondGroup(int, Order)} calls.
-				 * 
+				 * specified field in the specified {@link Order}.
+				 *
+				 * <p><b>Note: Only groups of Tuple elements and Pojos can be sorted.</b>
+				 *
+				 * <p>Groups can be sorted by multiple fields by chaining {@link #sortSecondGroup(int, Order)} calls.
+				 *
 				 * @param field The Tuple field on which the group is sorted.
 				 * @param order The Order in which the specified Tuple field is sorted.
 				 * @return This CoGroup transformation with the specified group order for the second input.
-				 * 
+				 *
 				 * @see org.apache.flink.api.java.tuple.Tuple
 				 * @see Order
 				 */
@@ -667,57 +674,59 @@ public class CoGroupOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OU
 
 					ExpressionKeys<I2> ek = new ExpressionKeys<>(field, input2.getType());
 					int[] groupOrderKeys = ek.computeLogicalKeyPositions();
-					
+
 					for (int key : groupOrderKeys) {
 						this.groupSortKeyOrderSecond.add(new ImmutablePair<>(key, order));
 					}
-					
+
 					return this;
 				}
-				
+
 				/**
 				 * Sorts Pojo or {@link org.apache.flink.api.java.tuple.Tuple} elements within a group in the first input on the
-				 * specified field in the specified {@link Order}.<br>
-				 * Groups can be sorted by multiple fields by chaining {@link #sortFirstGroup(String, Order)} calls.
-				 * 
+				 * specified field in the specified {@link Order}.
+				 *
+				 * <p>Groups can be sorted by multiple fields by chaining {@link #sortFirstGroup(String, Order)} calls.
+				 *
 				 * @param fieldExpression The expression to the field on which the group is to be sorted.
 				 * @param order The Order in which the specified Tuple field is sorted.
 				 * @return This CoGroup transformation with the specified group order for the first input.
-				 * 
+				 *
 				 * @see Order
 				 */
 				public CoGroupOperatorWithoutFunction sortFirstGroup(String fieldExpression, Order order) {
 
 					ExpressionKeys<I1> ek = new ExpressionKeys<>(fieldExpression, input1.getType());
 					int[] groupOrderKeys = ek.computeLogicalKeyPositions();
-					
+
 					for (int key : groupOrderKeys) {
 						this.groupSortKeyOrderFirst.add(new ImmutablePair<>(key, order));
 					}
-					
+
 					return this;
 				}
-				
+
 				/**
 				 * Sorts Pojo or {@link org.apache.flink.api.java.tuple.Tuple} elements within a group in the second input on the
-				 * specified field in the specified {@link Order}.<br>
-				 * Groups can be sorted by multiple fields by chaining {@link #sortSecondGroup(String, Order)} calls.
-				 * 
+				 * specified field in the specified {@link Order}.
+				 *
+				 * <p>Groups can be sorted by multiple fields by chaining {@link #sortSecondGroup(String, Order)} calls.
+				 *
 				 * @param fieldExpression The expression to the field on which the group is to be sorted.
 				 * @param order The Order in which the specified Tuple field is sorted.
 				 * @return This CoGroup transformation with the specified group order for the second input.
-				 * 
+				 *
 				 * @see Order
 				 */
 				public CoGroupOperatorWithoutFunction sortSecondGroup(String fieldExpression, Order order) {
 
 					ExpressionKeys<I2> ek = new ExpressionKeys<>(fieldExpression, input2.getType());
 					int[] groupOrderKeys = ek.computeLogicalKeyPositions();
-					
+
 					for (int key : groupOrderKeys) {
 						this.groupSortKeyOrderSecond.add(new ImmutablePair<>(key, order));
 					}
-					
+
 					return this;
 				}
 			}
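
For illustration, a complete program against the CoGroup API documented above could
look as follows. This is a sketch, not part of this patch; the inputs and names are
invented, and equalTo(...) may alternatively take a KeySelector as the Javadoc above
describes.

import org.apache.flink.api.common.functions.CoGroupFunction;
import org.apache.flink.api.common.operators.Order;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

public class CoGroupExample {
	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		DataSet<Tuple2<Long, String>> users = env.fromElements(
				Tuple2.of(1L, "alice"), Tuple2.of(2L, "bob"));
		DataSet<Tuple2<Long, Double>> orders = env.fromElements(
				Tuple2.of(1L, 9.99), Tuple2.of(1L, 4.50));

		// key both inputs on field 0, sort each group of orders ascending on the
		// amount field, and finalize the transformation with a CoGroupFunction
		DataSet<Tuple2<String, Integer>> orderCounts = users
				.coGroup(orders)
				.where(0)
				.equalTo(0)
				.sortSecondGroup(1, Order.ASCENDING)
				.with(new CoGroupFunction<Tuple2<Long, String>, Tuple2<Long, Double>, Tuple2<String, Integer>>() {
					@Override
					public void coGroup(
							Iterable<Tuple2<Long, String>> userGroup,
							Iterable<Tuple2<Long, Double>> orderGroup,
							Collector<Tuple2<String, Integer>> out) {
						String name = "unknown";
						for (Tuple2<Long, String> user : userGroup) {
							name = user.f1;
						}
						int count = 0;
						for (Tuple2<Long, Double> order : orderGroup) {
							count++;
						}
						out.collect(Tuple2.of(name, count));
					}
				});

		// print() triggers execution of the program
		orderCounts.print();
	}
}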

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/CoGroupRawOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/CoGroupRawOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/CoGroupRawOperator.java
index 9a6d800..e607ac1 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/CoGroupRawOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/CoGroupRawOperator.java
@@ -15,6 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.flink.api.java.operators;
 
 import org.apache.flink.annotation.Internal;
@@ -23,19 +24,19 @@ import org.apache.flink.api.common.functions.CoGroupFunction;
 import org.apache.flink.api.common.functions.Function;
 import org.apache.flink.api.common.operators.BinaryOperatorInformation;
 import org.apache.flink.api.common.operators.Keys;
+import org.apache.flink.api.common.operators.Keys.IncompatibleKeysException;
 import org.apache.flink.api.common.operators.Operator;
 import org.apache.flink.api.common.operators.base.CoGroupRawOperatorBase;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.java.DataSet;
-import org.apache.flink.api.common.operators.Keys.IncompatibleKeysException;
 
 /**
- * A {@link DataSet} that is the result of a CoGroup transformation. 
- * 
+ * A {@link DataSet} that is the result of a CoGroup transformation.
+ *
  * @param <I1> The type of the first input DataSet of the CoGroup transformation.
  * @param <I2> The type of the second input DataSet of the CoGroup transformation.
  * @param <OUT> The type of the result of the CoGroup transformation.
- * 
+ *
  * @see DataSet
  */
 @Internal
@@ -95,10 +96,12 @@ public class CoGroupRawOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2,
 			int[] logicalKeyPositions1 = keys1.computeLogicalKeyPositions();
 			int[] logicalKeyPositions2 = keys2.computeLogicalKeyPositions();
 
-			CoGroupRawOperatorBase<I1, I2, OUT, CoGroupFunction<I1, I2, OUT>> po
-					= new CoGroupRawOperatorBase<I1, I2, OUT, CoGroupFunction<I1, I2, OUT>>(
-							function, new BinaryOperatorInformation<I1, I2, OUT>(getInput1Type(), getInput2Type(), 
-									getResultType()), logicalKeyPositions1, logicalKeyPositions2, name);
+			CoGroupRawOperatorBase<I1, I2, OUT, CoGroupFunction<I1, I2, OUT>> po = new CoGroupRawOperatorBase<>(
+				function,
+				new BinaryOperatorInformation<>(getInput1Type(), getInput2Type(), getResultType()),
+				logicalKeyPositions1,
+				logicalKeyPositions2,
+				name);
 
 			// set inputs
 			po.setFirstInput(input1);

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/CrossOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/CrossOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/CrossOperator.java
index 98956bb..711c215 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/CrossOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/CrossOperator.java
@@ -18,8 +18,6 @@
 
 package org.apache.flink.api.java.operators;
 
-import java.util.Arrays;
-
 import org.apache.flink.annotation.Internal;
 import org.apache.flink.annotation.Public;
 import org.apache.flink.annotation.PublicEvolving;
@@ -34,12 +32,37 @@ import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.Utils;
 import org.apache.flink.api.java.functions.SemanticPropUtil;
+import org.apache.flink.api.java.tuple.Tuple;
+import org.apache.flink.api.java.tuple.Tuple1;
+import org.apache.flink.api.java.tuple.Tuple10;
+import org.apache.flink.api.java.tuple.Tuple11;
+import org.apache.flink.api.java.tuple.Tuple12;
+import org.apache.flink.api.java.tuple.Tuple13;
+import org.apache.flink.api.java.tuple.Tuple14;
+import org.apache.flink.api.java.tuple.Tuple15;
+import org.apache.flink.api.java.tuple.Tuple16;
+import org.apache.flink.api.java.tuple.Tuple17;
+import org.apache.flink.api.java.tuple.Tuple18;
+import org.apache.flink.api.java.tuple.Tuple19;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.api.java.tuple.Tuple20;
+import org.apache.flink.api.java.tuple.Tuple21;
+import org.apache.flink.api.java.tuple.Tuple22;
+import org.apache.flink.api.java.tuple.Tuple23;
+import org.apache.flink.api.java.tuple.Tuple24;
+import org.apache.flink.api.java.tuple.Tuple25;
+import org.apache.flink.api.java.tuple.Tuple3;
+import org.apache.flink.api.java.tuple.Tuple4;
+import org.apache.flink.api.java.tuple.Tuple5;
+import org.apache.flink.api.java.tuple.Tuple6;
+import org.apache.flink.api.java.tuple.Tuple7;
+import org.apache.flink.api.java.tuple.Tuple8;
+import org.apache.flink.api.java.tuple.Tuple9;
 import org.apache.flink.api.java.typeutils.TupleTypeInfo;
 import org.apache.flink.api.java.typeutils.TypeExtractor;
 import org.apache.flink.util.Preconditions;
 
-//CHECKSTYLE.OFF: AvoidStarImport - Needed for TupleGenerator
-import org.apache.flink.api.java.tuple.*;
+import java.util.Arrays;
 
 /**
  * A {@link DataSet} that is the result of a Cross transformation.
@@ -54,17 +77,16 @@ import org.apache.flink.api.java.tuple.*;
 public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT, CrossOperator<I1, I2, OUT>> {
 
 	private final CrossFunction<I1, I2, OUT> function;
-	
+
 	private final String defaultName;
-	
+
 	private final CrossHint hint;
 
 	public CrossOperator(DataSet<I1> input1, DataSet<I2> input2,
 							CrossFunction<I1, I2, OUT> function,
 							TypeInformation<OUT> returnType,
 							CrossHint hint,
-							String defaultName)
-	{
+							String defaultName) {
 		super(input1, input2, returnType);
 
 		this.function = function;
@@ -83,21 +105,20 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 	public CrossHint getCrossHint() {
 		return hint;
 	}
-	
+
 	private String getDefaultName() {
 		return defaultName;
 	}
 
 	@Override
-	protected org.apache.flink.api.common.operators.base.CrossOperatorBase<I1, I2, OUT, CrossFunction<I1,I2,OUT>> translateToDataFlow(Operator<I1> input1, Operator<I2> input2) {
-		
-		String name = getName() != null ? getName() : "Cross at "+defaultName;
+	protected CrossOperatorBase<I1, I2, OUT, CrossFunction<I1, I2, OUT>> translateToDataFlow(Operator<I1> input1, Operator<I2> input2) {
+
+		String name = getName() != null ? getName() : "Cross at " + defaultName;
 		// create operator
 		CrossOperatorBase<I1, I2, OUT, CrossFunction<I1, I2, OUT>> po =
-				new CrossOperatorBase<I1, I2, OUT, CrossFunction<I1, I2, OUT>>(function, 
+				new CrossOperatorBase<I1, I2, OUT, CrossFunction<I1, I2, OUT>>(function,
 						new BinaryOperatorInformation<I1, I2, OUT>(getInput1Type(), getInput2Type(), getResultType()),
 						name);
-		
 
 		po.setFirstInput(input1);
 		po.setSecondInput(input2);
@@ -112,12 +133,13 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 	// --------------------------------------------------------------------------------------------
 
 	/**
-	 * A Cross transformation that wraps pairs of crossed elements into {@link Tuple2}.<br>
-	 * It also represents the {@link DataSet} that is the result of a Cross transformation. 
-	 * 
+	 * A Cross transformation that wraps pairs of crossed elements into {@link Tuple2}.
+	 *
+	 * <p>It also represents the {@link DataSet} that is the result of a Cross transformation.
+	 *
 	 * @param <I1> The type of the first input DataSet of the Cross transformation.
 	 * @param <I2> The type of the second input DataSet of the Cross transformation.
-	 * 
+	 *
 	 * @see Tuple2
 	 * @see DataSet
 	 */
@@ -133,12 +155,13 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Finalizes a Cross transformation by applying a {@link CrossFunction} to each pair of crossed elements.<br>
-		 * Each CrossFunction call returns exactly one element. 
-		 * 
+		 * Finalizes a Cross transformation by applying a {@link CrossFunction} to each pair of crossed elements.
+		 *
+		 * <p>Each CrossFunction call returns exactly one element.
+		 *
 		 * @param function The CrossFunction that is called for each pair of crossed elements.
 		 * @return A CrossOperator that represents the crossed result DataSet.
-		 * 
+		 *
 		 * @see CrossFunction
 		 * @see DataSet
 		 */
@@ -151,13 +174,14 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 			return new CrossOperator<I1, I2, R>(getInput1(), getInput2(), clean(function), returnType,
 					getCrossHint(), Utils.getCallLocationName());
 		}
-		
+
 		/**
-		 * Initiates a ProjectCross transformation and projects the first cross input<br>
-		 * If the first cross input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
-		 * If the first cross input is not a Tuple DataSet, no parameters should be passed.<br>
+		 * Initiates a ProjectCross transformation and projects the first cross input.
+		 *
+		 * <p>If the first cross input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
+		 * If the first cross input is not a Tuple DataSet, no parameters should be passed.
 		 *
-		 * Fields of the first and second input can be added by chaining the method calls of
+		 * <p>Fields of the first and second input can be added by chaining the method calls of
 		 * {@link org.apache.flink.api.java.operators.CrossOperator.ProjectCross#projectFirst(int...)} and
 		 * {@link org.apache.flink.api.java.operators.CrossOperator.ProjectCross#projectSecond(int...)}.
 		 *
@@ -176,13 +200,14 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 			return new CrossProjection<I1, I2>(getInput1(), getInput2(), firstFieldIndexes, null, getCrossHint())
 						.projectTupleX();
 		}
-		
+
 		/**
-		 * Initiates a ProjectCross transformation and projects the second cross input<br>
-		 * If the second cross input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
-		 * If the second cross input is not a Tuple DataSet, no parameters should be passed.<br>
+		 * Initiates a ProjectCross transformation and projects the second cross input.
+		 *
+		 * <p>If the second cross input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
+		 * If the second cross input is not a Tuple DataSet, no parameters should be passed.
 		 *
-		 * Fields of the first and second input can be added by chaining the method calls of
+		 * <p>Fields of the first and second input can be added by chaining the method calls of
 		 * {@link org.apache.flink.api.java.operators.CrossOperator.ProjectCross#projectFirst(int...)} and
 		 * {@link org.apache.flink.api.java.operators.CrossOperator.ProjectCross#projectSecond(int...)}.
 		 *
@@ -201,13 +226,14 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 			return new CrossProjection<I1, I2>(getInput1(), getInput2(), null, secondFieldIndexes, getCrossHint())
 						.projectTupleX();
 		}
-		
+
 	}
 
 	/**
 	 * A Cross transformation that projects crossing elements or fields of crossing {@link Tuple Tuples}
-	 * into result {@link Tuple Tuples}. <br>
-	 * It also represents the {@link DataSet} that is the result of a Cross transformation.
+	 * into result {@link Tuple Tuples}.
+	 *
+	 * <p>It also represents the {@link DataSet} that is the result of a Cross transformation.
 	 *
 	 * @param <I1> The type of the first input DataSet of the Cross transformation.
 	 * @param <I2> The type of the second input DataSet of the Cross transformation.
@@ -218,26 +244,24 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 	 */
 	@Public
 	public static final class ProjectCross<I1, I2, OUT extends Tuple> extends CrossOperator<I1, I2, OUT> {
-		
+
 		private CrossProjection<I1, I2> crossProjection;
 
 		protected ProjectCross(DataSet<I1> input1, DataSet<I2> input2, int[] fields, boolean[] isFromFirst,
-				TupleTypeInfo<OUT> returnType, CrossHint hint)
-		{
+				TupleTypeInfo<OUT> returnType, CrossHint hint) {
 			super(input1, input2,
 					new ProjectCrossFunction<I1, I2, OUT>(fields, isFromFirst, returnType.createSerializer(input1.getExecutionEnvironment().getConfig()).createInstance()),
 					returnType, hint, "unknown");
-			
+
 			crossProjection = null;
 		}
-		
+
 		protected ProjectCross(DataSet<I1> input1, DataSet<I2> input2, int[] fields, boolean[] isFromFirst,
-				TupleTypeInfo<OUT> returnType, CrossProjection<I1, I2> crossProjection, CrossHint hint)
-		{
+				TupleTypeInfo<OUT> returnType, CrossProjection<I1, I2> crossProjection, CrossHint hint) {
 			super(input1, input2,
 				new ProjectCrossFunction<I1, I2, OUT>(fields, isFromFirst, returnType.createSerializer(input1.getExecutionEnvironment().getConfig()).createInstance()),
 				returnType, hint, "unknown");
-			
+
 			this.crossProjection = crossProjection;
 		}
 
@@ -245,17 +269,18 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		protected ProjectCrossFunction<I1, I2, OUT> getFunction() {
 			return (ProjectCrossFunction<I1, I2, OUT>) super.getFunction();
 		}
-		
+
 		/**
-		 * Continues a ProjectCross transformation and adds fields of the first cross input to the projection.<br>
-		 * If the first cross input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
-		 * If the first cross input is not a Tuple DataSet, no parameters should be passed.<br>
+		 * Continues a ProjectCross transformation and adds fields of the first cross input to the projection.
 		 *
-		 * Additional fields of the first and second input can be added by chaining the method calls of
+		 * <p>If the first cross input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
+		 * If the first cross input is not a Tuple DataSet, no parameters should be passed.
+		 *
+		 * <p>Additional fields of the first and second input can be added by chaining the method calls of
 		 * {@link org.apache.flink.api.java.operators.CrossOperator.ProjectCross#projectFirst(int...)} and
 		 * {@link org.apache.flink.api.java.operators.CrossOperator.ProjectCross#projectSecond(int...)}.
 		 *
-		 * <b>Note: With the current implementation, the Project transformation looses type information.</b>
+		 * <p><b>Note: With the current implementation, the Project transformation loses type information.</b>
 		 *
 		 * @param firstFieldIndexes If the first input is a Tuple DataSet, the indexes of the selected fields.
 		 * 					   For a non-Tuple DataSet, do not provide parameters.
@@ -269,16 +294,17 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		@SuppressWarnings("hiding")
 		public <OUT extends Tuple> ProjectCross<I1, I2, OUT> projectFirst(int... firstFieldIndexes) {
 			crossProjection = crossProjection.projectFirst(firstFieldIndexes);
-			
+
 			return crossProjection.projectTupleX();
 		}
 
 		/**
-		 * Continues a ProjectCross transformation and adds fields of the second cross input to the projection.<br>
-		 * If the second cross input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
-		 * If the second cross input is not a Tuple DataSet, no parameters should be passed.<br>
+		 * Continues a ProjectCross transformation and adds fields of the second cross input to the projection.
 		 *
-		 * Additional fields of the first and second input can be added by chaining the method calls of
+		 * <p>If the second cross input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
+		 * If the second cross input is not a Tuple DataSet, no parameters should be passed.
+		 *
+		 * <p>Additional fields of the first and second input can be added by chaining the method calls of
 		 * {@link org.apache.flink.api.java.operators.CrossOperator.ProjectCross#projectFirst(int...)} and
 		 * {@link org.apache.flink.api.java.operators.CrossOperator.ProjectCross#projectSecond(int...)}.
 		 *
@@ -296,7 +322,7 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		@SuppressWarnings("hiding")
 		public <OUT extends Tuple> ProjectCross<I1, I2, OUT> projectSecond(int... secondFieldIndexes) {
 			crossProjection = crossProjection.projectSecond(secondFieldIndexes);
-			
+
 			return crossProjection.projectTupleX();
 		}
 
@@ -307,15 +333,15 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		@Deprecated
 		@PublicEvolving
 		public <OUT extends Tuple> CrossOperator<I1, I2, OUT> types(Class<?>... types) {
-			TupleTypeInfo<OUT> typeInfo = (TupleTypeInfo<OUT>)this.getResultType();
+			TupleTypeInfo<OUT> typeInfo = (TupleTypeInfo<OUT>) this.getResultType();
 
-			if(types.length != typeInfo.getArity()) {
+			if (types.length != typeInfo.getArity()) {
 				throw new InvalidProgramException("Provided types do not match projection.");
 			}
-			for (int i=0; i<types.length; i++) {
+			for (int i = 0; i < types.length; i++) {
 				Class<?> typeClass = types[i];
 				if (!typeClass.equals(typeInfo.getTypeAt(i).getTypeClass())) {
-					throw new InvalidProgramException("Provided type "+typeClass.getSimpleName()+" at position "+i+" does not match projection");
+					throw new InvalidProgramException("Provided type " + typeClass.getSimpleName() + " at position " + i + " does not match projection");
 				}
 			}
 			return (CrossOperator<I1, I2, OUT>) this;
@@ -330,7 +356,7 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		public CrossOperator<I1, I2, OUT> withForwardedFieldsSecond(String... forwardedFieldsSecond) {
 			throw new InvalidProgramException("The semantic properties (forwarded fields) are automatically calculated.");
 		}
-		
+
 		@Override
 		protected DualInputSemanticProperties extractSemanticAnnotationsFromUdf(Class<?> udfClass) {
 			// we do not extract anything, but construct the properties from the projection
@@ -339,6 +365,12 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 	}
 
+	/**
+	 * The {@link CrossFunction} that realizes the projection of a ProjectCross transformation.
+	 *
+	 * @see ProjectCross
+	 * @param <T1> The type of the first input.
+	 * @param <T2> The type of the second input.
+	 * @param <R> The {@link Tuple} type of the projection result.
+	 */
 	@Internal
 	public static final class ProjectCrossFunction<T1, T2, R extends Tuple> implements CrossFunction<T1, T2, R> {
 
@@ -346,7 +378,7 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 
 		private final int[] fields;
 		private final boolean[] isFromFirst;
-		
+
 		private final R outTuple;
 
 		/**
@@ -360,7 +392,7 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		 */
 		private ProjectCrossFunction(int[] fields, boolean[] isFromFirst, R outTupleInstance) {
 
-			if(fields.length != isFromFirst.length) {
+			if (fields.length != isFromFirst.length) {
 				throw new IllegalArgumentException("Fields and isFromFirst arrays must have same length!");
 			}
 			this.fields = fields;
@@ -369,16 +401,16 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		public R cross(T1 in1, T2 in2) {
-			for(int i=0; i<fields.length; i++) {
-				if(isFromFirst[i]) {
-					if(fields[i] >= 0) {
-						outTuple.setField(((Tuple)in1).getField(fields[i]), i);
+			for (int i = 0; i < fields.length; i++) {
+				if (isFromFirst[i]) {
+					if (fields[i] >= 0) {
+						outTuple.setField(((Tuple) in1).getField(fields[i]), i);
 					} else {
 						outTuple.setField(in1, i);
 					}
 				} else {
-					if(fields[i] >= 0) {
-						outTuple.setField(((Tuple)in2).getField(fields[i]), i);
+					if (fields[i] >= 0) {
+						outTuple.setField(((Tuple) in2).getField(fields[i]), i);
 					} else {
 						outTuple.setField(in2, i);
 					}
@@ -394,12 +426,17 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		protected boolean[] getIsFromFirst() {
 			return isFromFirst;
 		}
-		
+
 	}
 
+	/**
+	 * An intermediate step of a ProjectCross transformation that collects the projected fields of both inputs.
+	 *
+	 * @see ProjectCross
+	 * @param <I1> The type of the first input DataSet.
+	 * @param <I2> The type of the second input DataSet.
+	 */
 	@Internal
 	public static final class CrossProjection<I1, I2> {
-		
+
 		private final DataSet<I1> ds1;
 		private final DataSet<I2> ds2;
 
@@ -408,7 +445,7 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 
 		private final int numFieldsDs1;
 		private final int numFieldsDs2;
-		
+
 		private final CrossHint hint;
 
 		public CrossProjection(DataSet<I1> ds1, DataSet<I2> ds2, int[] firstFieldIndexes, int[] secondFieldIndexes, CrossHint hint) {
@@ -420,15 +457,15 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 			boolean isFirstTuple;
 			boolean isSecondTuple;
 
-			if(ds1.getType() instanceof TupleTypeInfo) {
-				numFieldsDs1 = ((TupleTypeInfo<?>)ds1.getType()).getArity();
+			if (ds1.getType() instanceof TupleTypeInfo) {
+				numFieldsDs1 = ((TupleTypeInfo<?>) ds1.getType()).getArity();
 				isFirstTuple = true;
 			} else {
 				numFieldsDs1 = 1;
 				isFirstTuple = false;
 			}
-			if(ds2.getType() instanceof TupleTypeInfo) {
-				numFieldsDs2 = ((TupleTypeInfo<?>)ds2.getType()).getArity();
+			if (ds2.getType() instanceof TupleTypeInfo) {
+				numFieldsDs2 = ((TupleTypeInfo<?>) ds2.getType()).getArity();
 				isSecondTuple = true;
 			} else {
 				numFieldsDs2 = 1;
@@ -438,13 +475,13 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 			boolean isTuple;
 			boolean firstInput;
 
-			if(firstFieldIndexes != null && secondFieldIndexes == null) {
+			if (firstFieldIndexes != null && secondFieldIndexes == null) {
 				// index array for first input is provided
 				firstInput = true;
 				isTuple = isFirstTuple;
 				this.fieldIndexes = firstFieldIndexes;
 
-				if(this.fieldIndexes.length == 0) {
+				if (this.fieldIndexes.length == 0) {
 					// no indexes provided, treat tuple as regular object
 					isTuple = false;
 				}
@@ -454,7 +491,7 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 				isTuple = isSecondTuple;
 				this.fieldIndexes = secondFieldIndexes;
 
-				if(this.fieldIndexes.length == 0) {
+				if (this.fieldIndexes.length == 0) {
 					// no indexes provided, treat tuple as regular object
 					isTuple = false;
 				}
@@ -464,22 +501,22 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 				throw new IllegalArgumentException("You must provide at most one field index array.");
 			}
 
-			if(!isTuple && this.fieldIndexes.length != 0) {
+			if (!isTuple && this.fieldIndexes.length != 0) {
 				// field index provided for non-Tuple input
 				throw new IllegalArgumentException("Input is not a Tuple. Call projectFirst() (or projectSecond()) without arguments to include it.");
-			} else if(this.fieldIndexes.length > 22) {
+			} else if (this.fieldIndexes.length > 22) {
 				throw new IllegalArgumentException("You may select only up to twenty-two (22) fields.");
 			}
 
-			if(isTuple) {
+			if (isTuple) {
 				this.isFieldInFirst = new boolean[this.fieldIndexes.length];
 
 				// check field indexes and adapt to position in tuple
 				int maxFieldIndex = firstInput ? numFieldsDs1 : numFieldsDs2;
-				for(int i=0; i<this.fieldIndexes.length; i++) {
+				for (int i = 0; i < this.fieldIndexes.length; i++) {
 					Preconditions.checkElementIndex(this.fieldIndexes[i], maxFieldIndex);
-					
-					if(firstInput) {
+
+					if (firstInput) {
 						this.isFieldInFirst[i] = true;
 					} else {
 						this.isFieldInFirst[i] = false;
@@ -493,11 +530,12 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Continues a ProjectCross transformation and adds fields of the first cross input.<br>
-		 * If the first cross input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
-		 * If the first cross input is not a Tuple DataSet, no parameters should be passed.<br>
+		 * Continues a ProjectCross transformation and adds fields of the first cross input.
 		 *
-		 * Fields of the first and second input can be added by chaining the method calls of
+		 * <p>If the first cross input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
+		 * If the first cross input is not a Tuple DataSet, no parameters should be passed.
+		 *
+		 * <p>Fields of the first and second input can be added by chaining the method calls of
 		 * {@link org.apache.flink.api.java.operators.CrossOperator.CrossProjection#projectFirst(int...)} and
 		 * {@link org.apache.flink.api.java.operators.CrossOperator.CrossProjection#projectSecond(int...)}.
 		 *
@@ -515,30 +553,30 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 
 			boolean isFirstTuple;
 
-			if(ds1.getType() instanceof TupleTypeInfo && firstFieldIndexes.length > 0) {
+			if (ds1.getType() instanceof TupleTypeInfo && firstFieldIndexes.length > 0) {
 				isFirstTuple = true;
 			} else {
 				isFirstTuple = false;
 			}
 
-			if(!isFirstTuple && firstFieldIndexes.length != 0) {
+			if (!isFirstTuple && firstFieldIndexes.length != 0) {
 				// field index provided for non-Tuple input
 				throw new IllegalArgumentException("Input is not a Tuple. Call projectFirst() without arguments to include it.");
-			} else if(firstFieldIndexes.length > (22 - this.fieldIndexes.length)) {
+			} else if (firstFieldIndexes.length > (22 - this.fieldIndexes.length)) {
 				// too many field indexes provided
 				throw new IllegalArgumentException("You may select only up to twenty-two (22) fields in total.");
 			}
 
 			int offset = this.fieldIndexes.length;
 
-			if(isFirstTuple) {
+			if (isFirstTuple) {
 				// extend index and flag arrays
 				this.fieldIndexes = Arrays.copyOf(this.fieldIndexes, this.fieldIndexes.length + firstFieldIndexes.length);
 				this.isFieldInFirst = Arrays.copyOf(this.isFieldInFirst, this.isFieldInFirst.length + firstFieldIndexes.length);
 
 				// copy field indexes
 				int maxFieldIndex = numFieldsDs1;
-				for(int i = 0; i < firstFieldIndexes.length; i++) {
+				for (int i = 0; i < firstFieldIndexes.length; i++) {
 					// check if indexes in range
 					Preconditions.checkElementIndex(firstFieldIndexes[i], maxFieldIndex);
 
@@ -559,11 +597,12 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Continues a ProjectCross transformation and adds fields of the second cross input.<br>
-		 * If the second cross input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
-		 * If the second cross input is not a Tuple DataSet, no parameters should be passed.<br>
+		 * Continues a ProjectCross transformation and adds fields of the second cross input.
+		 *
+		 * <p>If the second cross input is a {@link Tuple} {@link DataSet}, fields can be selected by their index.
+		 * If the second cross input is not a Tuple DataSet, no parameters should be passed.
 		 *
-		 * Fields of the first and second input can be added by chaining the method calls of
+		 * <p>Fields of the first and second input can be added by chaining the method calls of
 		 * {@link org.apache.flink.api.java.operators.CrossOperator.CrossProjection#projectFirst(int...)} and
 		 * {@link org.apache.flink.api.java.operators.CrossOperator.CrossProjection#projectSecond(int...)}.
 		 *
@@ -581,30 +620,30 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 
 			boolean isSecondTuple;
 
-			if(ds2.getType() instanceof TupleTypeInfo && secondFieldIndexes.length > 0) {
+			if (ds2.getType() instanceof TupleTypeInfo && secondFieldIndexes.length > 0) {
 				isSecondTuple = true;
 			} else {
 				isSecondTuple = false;
 			}
 
-			if(!isSecondTuple && secondFieldIndexes.length != 0) {
+			if (!isSecondTuple && secondFieldIndexes.length != 0) {
 				// field index provided for non-Tuple input
 				throw new IllegalArgumentException("Input is not a Tuple. Call projectSecond() without arguments to include it.");
-			} else if(secondFieldIndexes.length > (22 - this.fieldIndexes.length)) {
+			} else if (secondFieldIndexes.length > (22 - this.fieldIndexes.length)) {
 				// too many field indexes provided
 				throw new IllegalArgumentException("You may select only up to twenty-two (22) fields in total.");
 			}
 
 			int offset = this.fieldIndexes.length;
 
-			if(isSecondTuple) {
+			if (isSecondTuple) {
 				// extend index and flag arrays
 				this.fieldIndexes = Arrays.copyOf(this.fieldIndexes, this.fieldIndexes.length + secondFieldIndexes.length);
 				this.isFieldInFirst = Arrays.copyOf(this.isFieldInFirst, this.isFieldInFirst.length + secondFieldIndexes.length);
 
 				// copy field indexes
 				int maxFieldIndex = numFieldsDs2;
-				for(int i = 0; i < secondFieldIndexes.length; i++) {
+				for (int i = 0; i < secondFieldIndexes.length; i++) {
 					// check if indexes in range
 					Preconditions.checkElementIndex(secondFieldIndexes[i], maxFieldIndex);
 
@@ -623,7 +662,7 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 
 			return this;
 		}
-		
+
 		// --------------------------------------------------------------------------------------------
 		// The following lines are generated.
 		// --------------------------------------------------------------------------------------------
@@ -632,8 +671,8 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 
 		/**
 		 * Chooses a projectTupleX according to the length of
-		 * {@link org.apache.flink.api.java.operators.CrossOperator.CrossProjection#fieldIndexes} 
-		 * 
+		 * {@link org.apache.flink.api.java.operators.CrossOperator.CrossProjection#fieldIndexes}.
+		 *
 		 * @return The projected DataSet.
 		 */
 		@SuppressWarnings("unchecked")
@@ -673,10 +712,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -688,10 +727,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -703,10 +742,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -718,10 +757,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -733,10 +772,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -748,10 +787,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -763,10 +802,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -778,10 +817,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -793,10 +832,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -808,10 +847,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -823,10 +862,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -838,10 +877,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -853,10 +892,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -868,10 +907,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -883,10 +922,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -898,10 +937,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -913,10 +952,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -928,10 +967,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -943,10 +982,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -958,10 +997,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -973,10 +1012,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -988,10 +1027,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1003,10 +1042,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1018,10 +1057,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1033,10 +1072,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 		}
 
 		/**
-		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields. 
-		 * 
+		 * Projects a pair of crossed elements to a {@link Tuple} with the previously selected fields.
+		 *
 		 * @return The projected data set.
-		 * 
+		 *
 		 * @see Tuple
 		 * @see DataSet
 		 */
@@ -1049,23 +1088,23 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 
 		// END_OF_TUPLE_DEPENDENT_CODE
 		// -----------------------------------------------------------------------------------------
-		
+
 		private TypeInformation<?>[] extractFieldTypes(int[] fields) {
 
 			TypeInformation<?>[] fieldTypes = new TypeInformation[fields.length];
 
-			for(int i=0; i<fields.length; i++) {
+			for (int i = 0; i < fields.length; i++) {
 
 				TypeInformation<?> typeInfo;
-				if(isFieldInFirst[i]) {
-					if(fields[i] >= 0) {
-						typeInfo = ((TupleTypeInfo<?>)ds1.getType()).getTypeAt(fields[i]);
+				if (isFieldInFirst[i]) {
+					if (fields[i] >= 0) {
+						typeInfo = ((TupleTypeInfo<?>) ds1.getType()).getTypeAt(fields[i]);
 					} else {
 						typeInfo = ds1.getType();
 					}
 				} else {
-					if(fields[i] >= 0) {
-						typeInfo = ((TupleTypeInfo<?>)ds2.getType()).getTypeAt(fields[i]);
+					if (fields[i] >= 0) {
+						typeInfo = ((TupleTypeInfo<?>) ds2.getType()).getTypeAt(fields[i]);
 					} else {
 						typeInfo = ds2.getType();
 					}
@@ -1083,10 +1122,10 @@ public class CrossOperator<I1, I2, OUT> extends TwoInputUdfOperator<I1, I2, OUT,
 	// --------------------------------------------------------------------------------------------
 
 	@Internal
-	public static final class DefaultCrossFunction<T1, T2> implements CrossFunction<T1, T2, Tuple2<T1, T2>> {
+	private static final class DefaultCrossFunction<T1, T2> implements CrossFunction<T1, T2, Tuple2<T1, T2>> {
 
 		private static final long serialVersionUID = 1L;
-		
+
 		private final Tuple2<T1, T2> outTuple = new Tuple2<T1, T2>();
 
 		@Override
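
As a usage sketch for the Cross API above (not part of this patch; the inputs are
invented), both ways of finalizing a Cross, an explicit CrossFunction and a field
projection, could look like this:

import org.apache.flink.api.common.functions.CrossFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;

public class CrossExample {
	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		DataSet<Tuple2<Integer, String>> left = env.fromElements(
				Tuple2.of(1, "a"), Tuple2.of(2, "b"));
		DataSet<Tuple2<Integer, Double>> right = env.fromElements(
				Tuple2.of(10, 0.5), Tuple2.of(20, 1.5));

		// variant 1: apply a CrossFunction to each pair of crossed elements
		DataSet<Tuple2<String, Double>> combined = left
				.cross(right)
				.with(new CrossFunction<Tuple2<Integer, String>, Tuple2<Integer, Double>, Tuple2<String, Double>>() {
					@Override
					public Tuple2<String, Double> cross(Tuple2<Integer, String> l, Tuple2<Integer, Double> r) {
						return Tuple2.of(l.f1, r.f1);
					}
				});

		// variant 2: a ProjectCross that selects field 1 of each input; as the
		// Javadoc above notes, this currently loses type information
		DataSet<Tuple2<String, Double>> projected = left
				.cross(right)
				.projectFirst(1)
				.projectSecond(1);

		combined.print();
		projected.print();
	}
}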

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/CustomUnaryOperation.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/CustomUnaryOperation.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/CustomUnaryOperation.java
index 8bbdfd1..164a944 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/CustomUnaryOperation.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/CustomUnaryOperation.java
@@ -24,12 +24,12 @@ import org.apache.flink.api.java.DataSet;
 /**
  * A custom operation that transforms one input {@link DataSet} into a result DataSet; it is applied via {@link DataSet#runOperation}.
  * @param <IN> The type of the data set consumed by this operator.
- * @param <OUT> The type of the data set produced by this operator. 
+ * @param <OUT> The type of the data set produced by this operator.
  */
 @Public
 public interface CustomUnaryOperation<IN, OUT> {
-	
+
 	void setInput(DataSet<IN> inputData);
-	
+
 	DataSet<OUT> createResult();
 }
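
A hypothetical implementation of this interface (invented for illustration, not part
of this patch) builds a fixed sub-plan in createResult() and is wired in through
DataSet#runOperation:

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.CustomUnaryOperation;

public class DeduplicateOperation<T> implements CustomUnaryOperation<T, T> {

	private DataSet<T> input;

	@Override
	public void setInput(DataSet<T> inputData) {
		this.input = inputData;
	}

	@Override
	public DataSet<T> createResult() {
		// any pre-assembled transformation can go here; distinct() is just an example
		return input.distinct();
	}

	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
		DataSet<String> words = env.fromElements("to", "be", "or", "not", "to", "be");

		// runOperation() calls setInput(...) and then createResult()
		DataSet<String> unique = words.runOperation(new DeduplicateOperation<String>());
		unique.print();
	}
}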

[6/6] flink git commit: [FLINK-7187] Activate checkstyle flink-java/sca

Posted by dw...@apache.org.
[FLINK-7187] Activate checkstyle flink-java/sca

This closes #4337.


Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/53d6582d
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/53d6582d
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/53d6582d

Branch: refs/heads/master
Commit: 53d6582d381fec5751d3b1edaae5c597d0f86b1d
Parents: 34e82f9
Author: Dawid Wysakowicz <dw...@apache.org>
Authored: Fri Jul 14 10:34:19 2017 +0200
Committer: Dawid Wysakowicz <dw...@apache.org>
Committed: Tue Jul 25 10:16:54 2017 +0200

----------------------------------------------------------------------
 .../api/java/sca/CodeAnalyzerException.java     |   1 +
 .../flink/api/java/sca/CodeErrorException.java  |   1 +
 .../flink/api/java/sca/ModifiedASMAnalyzer.java |   3 +-
 .../flink/api/java/sca/ModifiedASMFrame.java    |  12 +-
 .../api/java/sca/NestedMethodAnalyzer.java      |  41 ++---
 .../apache/flink/api/java/sca/TaggedValue.java  |  15 +-
 .../apache/flink/api/java/sca/UdfAnalyzer.java  |  23 +--
 .../flink/api/java/sca/UdfAnalyzerUtils.java    |   7 +-
 .../api/java/sca/UdfAnalyzerExamplesTest.java   |  91 +++++++----
 .../flink/api/java/sca/UdfAnalyzerTest.java     | 149 ++++++++++---------
 tools/maven/suppressions-java.xml               |   4 -
 11 files changed, 197 insertions(+), 150 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/53d6582d/flink-java/src/main/java/org/apache/flink/api/java/sca/CodeAnalyzerException.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/sca/CodeAnalyzerException.java b/flink-java/src/main/java/org/apache/flink/api/java/sca/CodeAnalyzerException.java
index 4dce071..798cbba 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/sca/CodeAnalyzerException.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/sca/CodeAnalyzerException.java
@@ -15,6 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.flink.api.java.sca;
 
 import org.apache.flink.annotation.PublicEvolving;

http://git-wip-us.apache.org/repos/asf/flink/blob/53d6582d/flink-java/src/main/java/org/apache/flink/api/java/sca/CodeErrorException.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/sca/CodeErrorException.java b/flink-java/src/main/java/org/apache/flink/api/java/sca/CodeErrorException.java
index 776fdb5..aa92e9a 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/sca/CodeErrorException.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/sca/CodeErrorException.java
@@ -15,6 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.flink.api.java.sca;
 
 import org.apache.flink.annotation.PublicEvolving;

http://git-wip-us.apache.org/repos/asf/flink/blob/53d6582d/flink-java/src/main/java/org/apache/flink/api/java/sca/ModifiedASMAnalyzer.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/sca/ModifiedASMAnalyzer.java b/flink-java/src/main/java/org/apache/flink/api/java/sca/ModifiedASMAnalyzer.java
index a555950..ef7f18d 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/sca/ModifiedASMAnalyzer.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/sca/ModifiedASMAnalyzer.java
@@ -19,6 +19,7 @@
 package org.apache.flink.api.java.sca;
 
 import org.apache.flink.annotation.Internal;
+
 import org.objectweb.asm.tree.AbstractInsnNode;
 import org.objectweb.asm.tree.InsnList;
 import org.objectweb.asm.tree.JumpInsnNode;
@@ -126,7 +127,7 @@ public class ModifiedASMAnalyzer extends Analyzer {
 						// from the label the goto instruction points to until the evaluation with IFEQ
 						final int idx = indexField.getInt(accessField(JumpInsnNode.class, "label").get(gotoInsnn));
 
-						for (int i=idx; i <= insn; i++) {
+						for (int i = idx; i <= insn; i++) {
 							((ModifiedASMFrame) frames[i]).mergePriority = true;
 						}
 						eventInsn = idx - 2;

http://git-wip-us.apache.org/repos/asf/flink/blob/53d6582d/flink-java/src/main/java/org/apache/flink/api/java/sca/ModifiedASMFrame.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/sca/ModifiedASMFrame.java b/flink-java/src/main/java/org/apache/flink/api/java/sca/ModifiedASMFrame.java
index f135869..685a91f 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/sca/ModifiedASMFrame.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/sca/ModifiedASMFrame.java
@@ -19,6 +19,7 @@
 package org.apache.flink.api.java.sca;
 
 import org.apache.flink.annotation.Internal;
+
 import org.objectweb.asm.tree.AbstractInsnNode;
 import org.objectweb.asm.tree.analysis.AnalyzerException;
 import org.objectweb.asm.tree.analysis.Frame;
@@ -40,13 +41,14 @@ public class ModifiedASMFrame extends Frame {
 	public ModifiedASMFrame(int nLocals, int nStack) {
 		super(nLocals, nStack);
 	}
+
 	public ModifiedASMFrame(Frame src) {
 		super(src);
 	}
 
 	@Override
 	public Frame init(Frame src) {
-		mergePriority = ((ModifiedASMFrame)src).mergePriority;
+		mergePriority = ((ModifiedASMFrame) src).mergePriority;
 		return super.init(src);
 	}
 
@@ -60,12 +62,12 @@ public class ModifiedASMFrame extends Frame {
 
 	@Override
 	public boolean merge(Frame frame, Interpreter interpreter) throws AnalyzerException {
-		if (((ModifiedASMFrame)frame).mergePriority) {
-			((NestedMethodAnalyzer)interpreter).rightMergePriority = true;
+		if (((ModifiedASMFrame) frame).mergePriority) {
+			((NestedMethodAnalyzer) interpreter).rightMergePriority = true;
 		}
 		final boolean result = super.merge(frame, interpreter);
-		((NestedMethodAnalyzer)interpreter).rightMergePriority = false;
-		((ModifiedASMFrame)frame).mergePriority = false;
+		((NestedMethodAnalyzer) interpreter).rightMergePriority = false;
+		((ModifiedASMFrame) frame).mergePriority = false;
 		return result;
 	}
 

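One detail worth noting in merge() above: the two flags are reset only when super.merge(...) returns normally. A hedged variant (an editor's sketch, not the committed code) that also resets them when Frame.merge throws an AnalyzerException would move the resets into a finally block:

@Override
public boolean merge(Frame frame, Interpreter interpreter) throws AnalyzerException {
	if (((ModifiedASMFrame) frame).mergePriority) {
		((NestedMethodAnalyzer) interpreter).rightMergePriority = true;
	}
	try {
		return super.merge(frame, interpreter);
	} finally {
		// reset even on an exceptional exit from Frame.merge
		((NestedMethodAnalyzer) interpreter).rightMergePriority = false;
		((ModifiedASMFrame) frame).mergePriority = false;
	}
}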
http://git-wip-us.apache.org/repos/asf/flink/blob/53d6582d/flink-java/src/main/java/org/apache/flink/api/java/sca/NestedMethodAnalyzer.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/sca/NestedMethodAnalyzer.java b/flink-java/src/main/java/org/apache/flink/api/java/sca/NestedMethodAnalyzer.java
index 7304c50..d56d60a 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/sca/NestedMethodAnalyzer.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/sca/NestedMethodAnalyzer.java
@@ -18,19 +18,9 @@
 
 package org.apache.flink.api.java.sca;
 
-import static org.apache.flink.api.java.sca.UdfAnalyzerUtils.findMethodNode;
-import static org.apache.flink.api.java.sca.UdfAnalyzerUtils.hasImportantDependencies;
-import static org.apache.flink.api.java.sca.UdfAnalyzerUtils.isTagged;
-import static org.apache.flink.api.java.sca.UdfAnalyzerUtils.mergeReturnValues;
-import static org.apache.flink.api.java.sca.UdfAnalyzerUtils.removeUngroupedInputs;
-import static org.apache.flink.api.java.sca.UdfAnalyzerUtils.tagged;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
 import org.apache.flink.annotation.Internal;
 import org.apache.flink.api.java.sca.TaggedValue.Tag;
+
 import org.objectweb.asm.Type;
 import org.objectweb.asm.tree.AbstractInsnNode;
 import org.objectweb.asm.tree.FieldInsnNode;
@@ -43,6 +33,17 @@ import org.objectweb.asm.tree.analysis.AnalyzerException;
 import org.objectweb.asm.tree.analysis.BasicInterpreter;
 import org.objectweb.asm.tree.analysis.BasicValue;
 
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import static org.apache.flink.api.java.sca.UdfAnalyzerUtils.findMethodNode;
+import static org.apache.flink.api.java.sca.UdfAnalyzerUtils.hasImportantDependencies;
+import static org.apache.flink.api.java.sca.UdfAnalyzerUtils.isTagged;
+import static org.apache.flink.api.java.sca.UdfAnalyzerUtils.mergeReturnValues;
+import static org.apache.flink.api.java.sca.UdfAnalyzerUtils.removeUngroupedInputs;
+import static org.apache.flink.api.java.sca.UdfAnalyzerUtils.tagged;
+
 /**
  * Extends ASM's BasicInterpreter. Instead of ASM's BasicValues, it introduces
  * TaggedValues which extend BasicValue and allows for appending interesting
@@ -76,7 +77,7 @@ public class NestedMethodAnalyzer extends BasicInterpreter {
 		this.owner = owner;
 		this.methodNode = methodNode;
 		this.argumentValues = argumentValues;
-		
+
 		this.remainingNesting = remainingNesting;
 		if (remainingNesting < 0) {
 			throw new CodeAnalyzerException("Maximum nesting level reached.");
@@ -117,7 +118,7 @@ public class NestedMethodAnalyzer extends BasicInterpreter {
 		}
 		final NestedMethodAnalyzer nma = new NestedMethodAnalyzer(analyzer, (String) mn[1],
 				(MethodNode) mn[0],
-				(List<BasicValue>) values, remainingNesting -1,
+				(List<BasicValue>) values, remainingNesting - 1,
 				topLevelMethod && isBridgeMethod());
 		return nma.analyze();
 	}
@@ -157,7 +158,7 @@ public class NestedMethodAnalyzer extends BasicInterpreter {
 		else if (convertedType.equals("boolean") && actualType.equals("Boolean")) {
 			return Type.BOOLEAN_TYPE;
 		}
-		else if (convertedType.equals("char") && actualType.equals("Character")	) {
+		else if (convertedType.equals("char") && actualType.equals("Character")) {
 			return Type.CHAR_TYPE;
 		}
 		else if (convertedType.equals("float") && actualType.equals("Float")) {
@@ -301,7 +302,7 @@ public class NestedMethodAnalyzer extends BasicInterpreter {
 				return new TaggedValue(Type.getObjectType("null"), Tag.NULL);
 			case NEW:
 				analyzer.incrNewOperationCounters(topLevelMethod);
-				// make new objects a tagged value to have possibility to tag an 
+				// make new objects a tagged value to have the possibility to tag an
 				// input container later
 				return new TaggedValue(Type.getObjectType(((TypeInsnNode) insn).desc));
 			// tag "int"-like constants
@@ -358,6 +359,8 @@ public class NestedMethodAnalyzer extends BasicInterpreter {
 			case DUP2_X2:
 				if (isTagged(value) && tagged(value).isInput() && tagged(value).isCallByValue()) {
 					return tagged(value).copy();
+				} else {
+					return super.copyOperation(insn, value);
 				}
 			default:
 				return super.copyOperation(insn, value);
@@ -488,13 +491,13 @@ public class NestedMethodAnalyzer extends BasicInterpreter {
 	@Override
 	public BasicValue naryOperation(AbstractInsnNode insn, List rawValues) throws AnalyzerException {
 		final List<BasicValue> values = (List<BasicValue>) rawValues;
-		boolean isStatic = false;
+		boolean isStatic;
 		switch (insn.getOpcode()) {
 			case INVOKESTATIC:
-				isStatic = true;
 			case INVOKESPECIAL:
 			case INVOKEVIRTUAL:
 			case INVOKEINTERFACE:
+				isStatic = insn.getOpcode() == INVOKESTATIC;
 				final MethodInsnNode method = (MethodInsnNode) insn;
 				String methodOwner = method.owner;
 
@@ -596,7 +599,7 @@ public class NestedMethodAnalyzer extends BasicInterpreter {
 							&& methodOwner.startsWith("org/apache/flink/api/java/tuple/Tuple")
 							&& isTagged(values.get(0))
 							) {
-						final TaggedValue tuple =tagged(values.get(0));
+						final TaggedValue tuple = tagged(values.get(0));
 						tuple.setTag(Tag.CONTAINER);
 
 						// check if fieldPos is constant
@@ -607,7 +610,7 @@ public class NestedMethodAnalyzer extends BasicInterpreter {
 						else {
 							final int constant = tagged(values.get(2)).getIntConstant();
 
-							if (constant < 0 || Integer.parseInt(methodOwner.split("Tuple")[1]) <= constant ) {
+							if (constant < 0 || Integer.parseInt(methodOwner.split("Tuple")[1]) <= constant) {
 								analyzer.handleInvalidTupleAccess();
 							}
 

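The class comment at the top of this file describes the core pattern: a BasicValue subclass that carries extra information through ASM's data-flow analysis, with a conservative fallback to plain BasicValues for anything unhandled. A minimal, self-contained sketch of that pattern against the ASM 5 API used here (toy names, not Flink's TaggedValue):

import org.objectweb.asm.Type;
import org.objectweb.asm.tree.AbstractInsnNode;
import org.objectweb.asm.tree.TypeInsnNode;
import org.objectweb.asm.tree.analysis.AnalyzerException;
import org.objectweb.asm.tree.analysis.BasicInterpreter;
import org.objectweb.asm.tree.analysis.BasicValue;

// Toy value that carries a label through the analysis.
class LabeledValue extends BasicValue {
	final String label;

	LabeledValue(Type type, String label) {
		super(type);
		this.label = label;
	}
}

// Toy interpreter: tags the results of NEW instructions, falls back to
// plain BasicValues for everything else.
class LabelingInterpreter extends BasicInterpreter {
	@Override
	public BasicValue newOperation(AbstractInsnNode insn) throws AnalyzerException {
		if (insn.getOpcode() == NEW) {
			Type type = Type.getObjectType(((TypeInsnNode) insn).desc);
			// note: surviving frame merges would also require overriding merge()
			return new LabeledValue(type, "allocated");
		}
		return super.newOperation(insn); // conservative fallback
	}
}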
http://git-wip-us.apache.org/repos/asf/flink/blob/53d6582d/flink-java/src/main/java/org/apache/flink/api/java/sca/TaggedValue.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/sca/TaggedValue.java b/flink-java/src/main/java/org/apache/flink/api/java/sca/TaggedValue.java
index cf0716d..aab2cf5 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/sca/TaggedValue.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/sca/TaggedValue.java
@@ -19,6 +19,7 @@
 package org.apache.flink.api.java.sca;
 
 import org.apache.flink.annotation.Internal;
+
 import org.objectweb.asm.Type;
 import org.objectweb.asm.tree.analysis.BasicValue;
 
@@ -34,7 +35,10 @@ import java.util.Map.Entry;
 @Internal
 public class TaggedValue extends BasicValue {
 
-	public static enum Tag {
+	/**
+	 * Possible tags.
+	 */
+	public enum Tag {
 		REGULAR, // regular object with no special meaning
 		THIS, // a special container which is the instance of the UDF
 		INPUT, // atomic input field
@@ -46,7 +50,10 @@ public class TaggedValue extends BasicValue {
 		NULL // null
 	}
 
-	public static enum Input {
+	/**
+	 * Distinguishes between inputs in the case of two-input operators.
+	 */
+	public enum Input {
 		INPUT_1(0), INPUT_2(1);
 
 		private int id;
@@ -219,7 +226,7 @@ public class TaggedValue extends BasicValue {
 
 	private void traverseContainer(Input input, Map<String, TaggedValue> containerMapping, StringBuilder sb,
 			String prefix) {
-		for (Map.Entry<String,TaggedValue> entry : containerMapping.entrySet()) {
+		for (Map.Entry<String, TaggedValue> entry : containerMapping.entrySet()) {
 			// skip undefined states
 			if (entry.getValue() == null) {
 				continue;
@@ -244,7 +251,7 @@ public class TaggedValue extends BasicValue {
 			// input containers
 			else if (entry.getValue().canContainFields()) {
 				traverseContainer(input, entry.getValue().containerMapping, sb,
-						((prefix.length() > 0)? prefix + "." : "") + entry.getKey());
+						((prefix.length() > 0) ? prefix + "." : "") + entry.getKey());
 			}
 		}
 	}

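traverseContainer above is what turns nested container mappings into dotted forwarded-field expressions such as "f0.f0". A toy illustration of the same prefixing idea (assumed data shapes, plain java.util maps instead of Flink's TaggedValue API):

import java.util.LinkedHashMap;
import java.util.Map;

public class FlattenSketch {
	public static void main(String[] args) {
		Map<String, Object> inner = new LinkedHashMap<String, Object>();
		inner.put("f0", "input(0)");
		Map<String, Object> outer = new LinkedHashMap<String, Object>();
		outer.put("f0", inner);
		outer.put("f1", "input(1)");
		traverse(outer, "");
	}

	@SuppressWarnings("unchecked")
	static void traverse(Map<String, Object> mapping, String prefix) {
		for (Map.Entry<String, Object> entry : mapping.entrySet()) {
			// same prefixing rule as traverseContainer above
			String path = (prefix.length() > 0 ? prefix + "." : "") + entry.getKey();
			if (entry.getValue() instanceof Map) {
				traverse((Map<String, Object>) entry.getValue(), path);
			} else {
				System.out.println(path + " -> " + entry.getValue()); // e.g. "f0.f0 -> input(0)"
			}
		}
	}
}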
http://git-wip-us.apache.org/repos/asf/flink/blob/53d6582d/flink-java/src/main/java/org/apache/flink/api/java/sca/UdfAnalyzer.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/sca/UdfAnalyzer.java b/flink-java/src/main/java/org/apache/flink/api/java/sca/UdfAnalyzer.java
index 1f5bee1..6022959 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/sca/UdfAnalyzer.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/sca/UdfAnalyzer.java
@@ -29,13 +29,14 @@ import org.apache.flink.api.common.functions.JoinFunction;
 import org.apache.flink.api.common.functions.MapFunction;
 import org.apache.flink.api.common.functions.ReduceFunction;
 import org.apache.flink.api.common.operators.DualInputSemanticProperties;
+import org.apache.flink.api.common.operators.Keys;
+import org.apache.flink.api.common.operators.Keys.ExpressionKeys;
 import org.apache.flink.api.common.operators.SemanticProperties;
 import org.apache.flink.api.common.operators.SingleInputSemanticProperties;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.java.functions.SemanticPropUtil;
-import org.apache.flink.api.common.operators.Keys;
-import org.apache.flink.api.common.operators.Keys.ExpressionKeys;
 import org.apache.flink.api.java.sca.TaggedValue.Input;
+
 import org.objectweb.asm.Type;
 import org.objectweb.asm.tree.MethodNode;
 import org.slf4j.Logger;
@@ -58,7 +59,7 @@ import static org.apache.flink.api.java.sca.UdfAnalyzerUtils.removeUngroupedInpu
  * constants are tagged such that a tracking of atomic input fields through the
  * entire UDF (until the function returns or calls collect()) is possible.
  *
- * The implementation is as conservative as possible meaning that for cases
+ * <p>The implementation is as conservative as possible meaning that for cases
  * or bytecode instructions that haven't been considered the analyzer
  * will fall back to the ASM library (which removes TaggedValues).
  */
@@ -271,7 +272,7 @@ public class UdfAnalyzer {
 		if (state == STATE_END_OF_ANALYZING) {
 			throw new IllegalStateException("Analyzing is already done.");
 		}
-		
+
 		boolean discardReturnValues = false;
 
 		if (isIterableInput) {
@@ -334,7 +335,7 @@ public class UdfAnalyzer {
 						"If you think the code analysis is wrong at this point you can " +
 						"disable the entire code analyzer in ExecutionConfig or add" +
 						" @SkipCodeAnalysis to your function to disable the analysis.",
-						(cause != null)? cause : e);
+						(cause != null) ? cause : e);
 			}
 			throw new CodeAnalyzerException("Exception occurred during code analysis.", e);
 		}
@@ -348,13 +349,13 @@ public class UdfAnalyzer {
 			if (returnValue != null) {
 				String[] ff1Array = null;
 				final String ff1 = returnValue.toForwardedFieldsExpression(Input.INPUT_1);
-				if (ff1 !=null && ff1.length() > 0) {
-					ff1Array = new String[] { ff1 };
+				if (ff1 != null && ff1.length() > 0) {
+					ff1Array = new String[]{ff1};
 				}
 				String[] ff2Array = null;
 				final String ff2 = returnValue.toForwardedFieldsExpression(Input.INPUT_2);
-				if (ff2 !=null && ff2.length() > 0) {
-					ff2Array = new String[] { ff2 };
+				if (ff2 != null && ff2.length() > 0) {
+					ff2Array = new String[]{ff2};
 				}
 				SemanticPropUtil.getSemanticPropsDualFromString((DualInputSemanticProperties) sp,
 						ff1Array, ff2Array, null, null, null, null, in1Type, in2Type, outType, true);
@@ -365,8 +366,8 @@ public class UdfAnalyzer {
 			if (returnValue != null) {
 				String[] ffArray = null;
 				final String ff = returnValue.toForwardedFieldsExpression(Input.INPUT_1);
-				if (ff !=null && ff.length() > 0) {
-					ffArray = new String[] { ff };
+				if (ff != null && ff.length() > 0) {
+					ffArray = new String[]{ff};
 				}
 				SemanticPropUtil.getSemanticPropsSingleFromString((SingleInputSemanticProperties) sp,
 						ffArray, null, null, in1Type, outType, true);

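The semantic properties assembled above are the machine-derived counterpart of the @ForwardedFields annotations used throughout Flink's test sources. A minimal hand-annotated sketch (a hypothetical class mirroring the style of the tests further down) of the fact the analyzer infers from bytecode:

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFields;
import org.apache.flink.api.java.tuple.Tuple2;

// f0 flows unchanged from input to output, so "f0" is a forwarded field;
// the analyzer derives the same fact from the bytecode when the annotation
// is absent.
@ForwardedFields("f0")
class KeepKeyReplaceValue implements MapFunction<Tuple2<String, Integer>, Tuple2<String, Integer>> {
	@Override
	public Tuple2<String, Integer> map(Tuple2<String, Integer> value) {
		return new Tuple2<String, Integer>(value.f0, 42);
	}
}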
http://git-wip-us.apache.org/repos/asf/flink/blob/53d6582d/flink-java/src/main/java/org/apache/flink/api/java/sca/UdfAnalyzerUtils.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/sca/UdfAnalyzerUtils.java b/flink-java/src/main/java/org/apache/flink/api/java/sca/UdfAnalyzerUtils.java
index a477a03..2844aea 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/sca/UdfAnalyzerUtils.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/sca/UdfAnalyzerUtils.java
@@ -25,6 +25,7 @@ import org.apache.flink.api.common.typeutils.CompositeType;
 import org.apache.flink.api.java.typeutils.PojoTypeInfo;
 import org.apache.flink.api.java.typeutils.TupleTypeInfo;
 import org.apache.flink.api.java.typeutils.TupleTypeInfoBase;
+
 import org.objectweb.asm.ClassReader;
 import org.objectweb.asm.Type;
 import org.objectweb.asm.tree.ClassNode;
@@ -45,7 +46,7 @@ import java.util.Map;
 import java.util.Set;
 
 /**
- * Utility class to work with {@link UdfAnalyzer}
+ * Utility class to work with {@link UdfAnalyzer}.
  */
 @Internal
 public final class UdfAnalyzerUtils {
@@ -64,7 +65,7 @@ public final class UdfAnalyzerUtils {
 				}
 				// scala
 				else {
-					fieldName = "_" + (i+1);
+					fieldName = "_" + (i + 1);
 				}
 				containerMapping.put(fieldName,
 						convertTypeInfoToTaggedValue(input,
@@ -145,7 +146,7 @@ public final class UdfAnalyzerUtils {
 			if (stream != null) {
 				try {
 					stream.close();
-				} catch (IOException e) { 
+				} catch (IOException e) {
 					// best effort cleanup
 				}
 			}

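The cleanup above keeps the manual quiet-close idiom. Assuming Java 7+ is available, an equivalent try-with-resources sketch (a hypothetical helper, not the committed method) closes the stream automatically even when ClassReader's constructor throws:

import org.objectweb.asm.ClassReader;

import java.io.IOException;
import java.io.InputStream;

final class ClassReaderSketch {
	// Hypothetical equivalent of the manual close above: the stream is
	// closed automatically on both normal and exceptional exits.
	static ClassReader readClass(ClassLoader loader, String internalName) throws IOException {
		try (InputStream stream = loader.getResourceAsStream(internalName + ".class")) {
			if (stream == null) {
				throw new IOException("Class not found: " + internalName);
			}
			return new ClassReader(stream);
		}
	}
}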
http://git-wip-us.apache.org/repos/asf/flink/blob/53d6582d/flink-java/src/test/java/org/apache/flink/api/java/sca/UdfAnalyzerExamplesTest.java
----------------------------------------------------------------------
diff --git a/flink-java/src/test/java/org/apache/flink/api/java/sca/UdfAnalyzerExamplesTest.java b/flink-java/src/test/java/org/apache/flink/api/java/sca/UdfAnalyzerExamplesTest.java
index 72a27b1..1b0cbec 100644
--- a/flink-java/src/test/java/org/apache/flink/api/java/sca/UdfAnalyzerExamplesTest.java
+++ b/flink-java/src/test/java/org/apache/flink/api/java/sca/UdfAnalyzerExamplesTest.java
@@ -33,6 +33,7 @@ import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.api.java.tuple.Tuple3;
 import org.apache.flink.api.java.tuple.Tuple5;
 import org.apache.flink.util.Collector;
+
 import org.junit.Test;
 
 import java.io.Serializable;
@@ -59,7 +60,7 @@ public class UdfAnalyzerExamplesTest {
 	// EnumTriangles
 	// --------------------------------------------------------------------------------------------
 
-	public static class Edge extends Tuple2<Integer, Integer> {
+	private static class Edge extends Tuple2<Integer, Integer> {
 		private static final long serialVersionUID = 1L;
 
 		public static final int V1 = 0;
@@ -72,13 +73,21 @@ public class UdfAnalyzerExamplesTest {
 			this.setSecondVertex(v2);
 		}
 
-		public Integer getFirstVertex() { return this.getField(V1); }
+		public Integer getFirstVertex() {
+			return this.getField(V1);
+		}
 
-		public Integer getSecondVertex() { return this.getField(V2); }
+		public Integer getSecondVertex() {
+			return this.getField(V2);
+		}
 
-		public void setFirstVertex(final Integer vertex1) { this.setField(vertex1, V1); }
+		public void setFirstVertex(final Integer vertex1) {
+			this.setField(vertex1, V1);
+		}
 
-		public void setSecondVertex(final Integer vertex2) { this.setField(vertex2, V2); }
+		public void setSecondVertex(final Integer vertex2) {
+			this.setField(vertex2, V2);
+		}
 
 		public void copyVerticesFromTuple2(Tuple2<Integer, Integer> t) {
 			this.setFirstVertex(t.f0);
@@ -92,20 +101,27 @@ public class UdfAnalyzerExamplesTest {
 		}
 	}
 
-	public static class Triad extends Tuple3<Integer, Integer, Integer> {
+	private static class Triad extends Tuple3<Integer, Integer, Integer> {
 		private static final long serialVersionUID = 1L;
 
 		public static final int V1 = 0;
 		public static final int V2 = 1;
 		public static final int V3 = 2;
 
-		public Triad() {}
+		public Triad() {
+		}
 
-		public void setFirstVertex(final Integer vertex1) { this.setField(vertex1, V1); }
+		public void setFirstVertex(final Integer vertex1) {
+			this.setField(vertex1, V1);
+		}
 
-		public void setSecondVertex(final Integer vertex2) { this.setField(vertex2, V2); }
+		public void setSecondVertex(final Integer vertex2) {
+			this.setField(vertex2, V2);
+		}
 
-		public void setThirdVertex(final Integer vertex3) { this.setField(vertex3, V3); }
+		public void setThirdVertex(final Integer vertex3) {
+			this.setField(vertex3, V3);
+		}
 	}
 
 	@ForwardedFields("0")
@@ -151,7 +167,7 @@ public class UdfAnalyzerExamplesTest {
 	}
 
 	@ForwardedFields("0;1")
-	public static class TupleEdgeConverter implements MapFunction<Tuple2<Integer, Integer>, Edge> {
+	private static class TupleEdgeConverter implements MapFunction<Tuple2<Integer, Integer>, Edge> {
 		private final Edge outEdge = new Edge();
 
 		@Override
@@ -204,14 +220,14 @@ public class UdfAnalyzerExamplesTest {
 				edge = edges.next();
 				Integer otherVertex = edge.getSecondVertex();
 				// collect unique vertices
-				if(!otherVertices.contains(otherVertex) && !otherVertex.equals(groupVertex)) {
+				if (!otherVertices.contains(otherVertex) && !otherVertex.equals(groupVertex)) {
 					this.otherVertices.add(otherVertex);
 				}
 			}
 
 			// emit edges
-			for(Integer otherVertex : this.otherVertices) {
-				if(groupVertex < otherVertex) {
+			for (Integer otherVertex : this.otherVertices) {
+				if (groupVertex < otherVertex) {
 					outputEdge.setFirstVertex(groupVertex);
 					outputEdge.setSecondVertex(otherVertex);
 				} else {
@@ -235,6 +251,9 @@ public class UdfAnalyzerExamplesTest {
 	// KMeans
 	// --------------------------------------------------------------------------------------------
 
+	/**
+	 * Representation of a point in the 2d plane.
+	 */
 	public static class Point implements Serializable {
 		public double x, y;
 
@@ -267,13 +286,16 @@ public class UdfAnalyzerExamplesTest {
 		}
 	}
 
+	/**
+	 * Representation of a centroid in the 2d plane.
+	 */
 	public static class Centroid extends Point {
 		public int id;
 
 		public Centroid() {}
 
 		public Centroid(int id, double x, double y) {
-			super(x,y);
+			super(x, y);
 			this.id = id;
 		}
 
@@ -289,7 +311,7 @@ public class UdfAnalyzerExamplesTest {
 	}
 
 	@ForwardedFields("0")
-	public static final class CentroidAccumulator implements ReduceFunction<Tuple3<Integer, Point, Long>> {
+	private static final class CentroidAccumulator implements ReduceFunction<Tuple3<Integer, Point, Long>> {
 		@Override
 		public Tuple3<Integer, Point, Long> reduce(Tuple3<Integer, Point, Long> val1, Tuple3<Integer, Point, Long> val2) {
 			return new Tuple3<Integer, Point, Long>(val1.f0, val1.f1.add(val2.f1), val1.f2 + val2.f2);
@@ -305,7 +327,7 @@ public class UdfAnalyzerExamplesTest {
 	}
 
 	@ForwardedFields("0->id")
-	public static final class CentroidAverager implements MapFunction<Tuple3<Integer, Point, Long>, Centroid> {
+	private static final class CentroidAverager implements MapFunction<Tuple3<Integer, Point, Long>, Centroid> {
 		@Override
 		public Centroid map(Tuple3<Integer, Point, Long> value) {
 			return new Centroid(value.f0, value.f1.div(value.f2));
@@ -323,7 +345,7 @@ public class UdfAnalyzerExamplesTest {
 	// ConnectedComponents
 	// --------------------------------------------------------------------------------------------
 
-	public static final class UndirectEdge implements FlatMapFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {
+	private static final class UndirectEdge implements FlatMapFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {
 		Tuple2<Long, Long> invertedEdge = new Tuple2<Long, Long>();
 
 		@Override
@@ -343,7 +365,7 @@ public class UdfAnalyzerExamplesTest {
 	}
 
 	@ForwardedFieldsFirst("*")
-	public static final class ComponentIdFilter implements FlatJoinFunction<Tuple2<Long, Long>, Tuple2<Long, Long>, Tuple2<Long, Long>> {
+	private static final class ComponentIdFilter implements FlatJoinFunction<Tuple2<Long, Long>, Tuple2<Long, Long>, Tuple2<Long, Long>> {
 		@Override
 		public void join(Tuple2<Long, Long> candidate, Tuple2<Long, Long> old, Collector<Tuple2<Long, Long>> out) {
 			if (candidate.f1 < old.f1) {
@@ -361,7 +383,7 @@ public class UdfAnalyzerExamplesTest {
 	}
 
 	@ForwardedFields("*->f0;*->f1")
-	public static final class DuplicateValue<T> implements MapFunction<T, Tuple2<T, T>> {
+	private static final class DuplicateValue<T> implements MapFunction<T, Tuple2<T, T>> {
 		@Override
 		public Tuple2<T, T> map(T vertex) {
 			return new Tuple2<T, T>(vertex, vertex);
@@ -377,7 +399,7 @@ public class UdfAnalyzerExamplesTest {
 
 	@ForwardedFieldsFirst("f1->f1")
 	@ForwardedFieldsSecond("f1->f0")
-	public static final class NeighborWithComponentIDJoin implements JoinFunction<Tuple2<Long, Long>, Tuple2<Long, Long>, Tuple2<Long, Long>> {
+	private static final class NeighborWithComponentIDJoin implements JoinFunction<Tuple2<Long, Long>, Tuple2<Long, Long>, Tuple2<Long, Long>> {
 		@Override
 		public Tuple2<Long, Long> join(Tuple2<Long, Long> vertexWithComponent, Tuple2<Long, Long> edge) {
 			return new Tuple2<Long, Long>(edge.f1, vertexWithComponent.f1);
@@ -397,7 +419,7 @@ public class UdfAnalyzerExamplesTest {
 	// --------------------------------------------------------------------------------------------
 
 	@ForwardedFieldsFirst("f1")
-	public static class AntiJoinVisits implements CoGroupFunction<Tuple3<Integer, String, Integer>, Tuple1<String>, Tuple3<Integer, String, Integer>> {
+	private static class AntiJoinVisits implements CoGroupFunction<Tuple3<Integer, String, Integer>, Tuple1<String>, Tuple3<Integer, String, Integer>> {
 		@Override
 		public void coGroup(Iterable<Tuple3<Integer, String, Integer>> ranks, Iterable<Tuple1<String>> visits, Collector<Tuple3<Integer, String, Integer>> out) {
 			// Check if there is an entry in the visits relation
@@ -424,7 +446,7 @@ public class UdfAnalyzerExamplesTest {
 	// --------------------------------------------------------------------------------------------
 
 	@ForwardedFields("0")
-	public static class BuildOutgoingEdgeList implements GroupReduceFunction<Tuple2<Long, Long>, Tuple2<Long, Long[]>> {
+	private static class BuildOutgoingEdgeList implements GroupReduceFunction<Tuple2<Long, Long>, Tuple2<Long, Long[]>> {
 		private final ArrayList<Long> neighbors = new ArrayList<Long>();
 
 		@Override
@@ -452,7 +474,7 @@ public class UdfAnalyzerExamplesTest {
 	// LogisticRegression
 	// --------------------------------------------------------------------------------------------
 
-	public static class Vector extends Tuple1<double[]> {
+	private static class Vector extends Tuple1<double[]> {
 		public Vector() {
 			// default constructor needed for instantiation during serialization
 		}
@@ -482,7 +504,7 @@ public class UdfAnalyzerExamplesTest {
 		}
 	}
 
-	public static class Gradient extends Vector {
+	private static class Gradient extends Vector {
 		public Gradient() {
 			// default constructor needed for instantiation during serialization
 		}
@@ -492,7 +514,7 @@ public class UdfAnalyzerExamplesTest {
 		}
 	}
 
-	public static class PointWithLabel extends Tuple2<Integer, double[]> {
+	private static class PointWithLabel extends Tuple2<Integer, double[]> {
 		public double[] getFeatures() {
 			return this.f1;
 		}
@@ -514,7 +536,7 @@ public class UdfAnalyzerExamplesTest {
 		}
 	}
 
-	public static class SumGradient implements ReduceFunction<Gradient> {
+	private static class SumGradient implements ReduceFunction<Gradient> {
 		@Override
 		public Gradient reduce(Gradient gradient1, Gradient gradient2) throws Exception {
 			// grad(i) +=
@@ -534,7 +556,7 @@ public class UdfAnalyzerExamplesTest {
 				new String[] { "0" });
 	}
 
-	public static class PointParser implements MapFunction<String, PointWithLabel> {
+	private static class PointParser implements MapFunction<String, PointWithLabel> {
 		@Override
 		public PointWithLabel map(String value) throws Exception {
 			PointWithLabel p = new PointWithLabel();
@@ -570,7 +592,7 @@ public class UdfAnalyzerExamplesTest {
 	// Canopy
 	// --------------------------------------------------------------------------------------------
 
-	public static class Document extends Tuple5<Integer, Boolean, Boolean, String, String> {
+	private static class Document extends Tuple5<Integer, Boolean, Boolean, String, String> {
 		public Document() {
 			// default constructor needed for instantiation during serialization
 		}
@@ -584,7 +606,7 @@ public class UdfAnalyzerExamplesTest {
 		}
 	}
 
-	public static class MessageBOW implements FlatMapFunction<String, Tuple2<Integer, String>> {
+	private static class MessageBOW implements FlatMapFunction<String, Tuple2<Integer, String>> {
 		@Override
 		public void flatMap(String value, Collector<Tuple2<Integer, String>> out) throws Exception {
 			String[] splits = value.split(" ");
@@ -603,7 +625,7 @@ public class UdfAnalyzerExamplesTest {
 	}
 
 	@ForwardedFields("0")
-	public static class DocumentReducer implements GroupReduceFunction<Tuple2<Integer, String>, Document> {
+	private static class DocumentReducer implements GroupReduceFunction<Tuple2<Integer, String>, Document> {
 		@Override
 		public void reduce(Iterable<Tuple2<Integer, String>> values, Collector<Document> out) throws Exception {
 			Iterator<Tuple2<Integer, String>> it = values.iterator();
@@ -626,7 +648,7 @@ public class UdfAnalyzerExamplesTest {
 	}
 
 	@ForwardedFields("0;4")
-	public static class MapToCenter implements MapFunction<Document, Document> {
+	private static class MapToCenter implements MapFunction<Document, Document> {
 		private Document center;
 
 		@Override
@@ -655,6 +677,9 @@ public class UdfAnalyzerExamplesTest {
 	// K-Meanspp
 	// --------------------------------------------------------------------------------------------
 
+	/**
+	 * Representation of a document with word frequencies.
+	 */
 	public static class DocumentWithFreq implements Serializable {
 		private static final long serialVersionUID = -8646398807053061675L;
 
@@ -677,7 +702,7 @@ public class UdfAnalyzerExamplesTest {
 	}
 
 	@ForwardedFields("0->id")
-	public static final class RecordToDocConverter implements GroupReduceFunction<Tuple3<Integer, Integer, Double>, DocumentWithFreq> {
+	private static final class RecordToDocConverter implements GroupReduceFunction<Tuple3<Integer, Integer, Double>, DocumentWithFreq> {
 		private static final long serialVersionUID = -8476366121490468956L;
 
 		@Override

http://git-wip-us.apache.org/repos/asf/flink/blob/53d6582d/flink-java/src/test/java/org/apache/flink/api/java/sca/UdfAnalyzerTest.java
----------------------------------------------------------------------
diff --git a/flink-java/src/test/java/org/apache/flink/api/java/sca/UdfAnalyzerTest.java b/flink-java/src/test/java/org/apache/flink/api/java/sca/UdfAnalyzerTest.java
index c371082..7e88838 100644
--- a/flink-java/src/test/java/org/apache/flink/api/java/sca/UdfAnalyzerTest.java
+++ b/flink-java/src/test/java/org/apache/flink/api/java/sca/UdfAnalyzerTest.java
@@ -43,6 +43,7 @@ import org.apache.flink.api.java.tuple.Tuple4;
 import org.apache.flink.api.java.tuple.Tuple8;
 import org.apache.flink.api.java.typeutils.TypeInfoParser;
 import org.apache.flink.util.Collector;
+
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -53,11 +54,14 @@ import java.util.Set;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link UdfAnalyzer}.
+ */
 @SuppressWarnings("serial")
 public class UdfAnalyzerTest {
 
 	@ForwardedFields("f0->*")
-	public static class Map1 implements MapFunction<Tuple2<String, Integer>, String> {
+	private static class Map1 implements MapFunction<Tuple2<String, Integer>, String> {
 		public String map(Tuple2<String, Integer> value) throws Exception {
 			return value.f0;
 		}
@@ -70,7 +74,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("f0->f0;f0->f1")
-	public static class Map2 implements MapFunction<Tuple2<String, Integer>, Tuple2<String, String>> {
+	private static class Map2 implements MapFunction<Tuple2<String, Integer>, Tuple2<String, String>> {
 		public Tuple2<String, String> map(Tuple2<String, Integer> value) throws Exception {
 			return new Tuple2<String, String>(value.f0, value.f0);
 		}
@@ -82,7 +86,7 @@ public class UdfAnalyzerTest {
 				"Tuple2<String,String>");
 	}
 
-	public static class Map3 implements MapFunction<String[], Integer> {
+	private static class Map3 implements MapFunction<String[], Integer> {
 		@Override
 		public Integer map(String[] value) throws Exception {
 			return value.length;
@@ -95,7 +99,7 @@ public class UdfAnalyzerTest {
 		compareAnalyzerResultWithAnnotationsSingleInput(MapFunction.class, Map3.class, "String[]", "Integer");
 	}
 
-	public static class Map4 implements MapFunction<MyPojo, String> {
+	private static class Map4 implements MapFunction<MyPojo, String> {
 		@Override
 		public String map(MyPojo value) throws Exception {
 			return value.field2;
@@ -109,7 +113,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("field2->*")
-	public static class Map5 implements MapFunction<MyPojo, String> {
+	private static class Map5 implements MapFunction<MyPojo, String> {
 		@Override
 		public String map(MyPojo value) throws Exception {
 			return value.field2;
@@ -123,7 +127,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("field->*")
-	public static class Map6 implements MapFunction<MyPojo, String> {
+	private static class Map6 implements MapFunction<MyPojo, String> {
 		@Override
 		public String map(MyPojo value) throws Exception {
 			return value.field;
@@ -137,7 +141,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("f0->f1")
-	public static class Map7 implements MapFunction<Tuple2<String, Integer>, Tuple2<String, String>> {
+	private static class Map7 implements MapFunction<Tuple2<String, Integer>, Tuple2<String, String>> {
 		public Tuple2<String, String> map(Tuple2<String, Integer> value) throws Exception {
 			if (value.f0.equals("whatever")) {
 				return new Tuple2<String, String>(value.f0, value.f0);
@@ -153,7 +157,7 @@ public class UdfAnalyzerTest {
 				"Tuple2<String,String>");
 	}
 
-	public static class Map8 implements MapFunction<Tuple2<String, String>, String> {
+	private static class Map8 implements MapFunction<Tuple2<String, String>, String> {
 		public String map(Tuple2<String, String> value) throws Exception {
 			if (value.f0.equals("whatever")) {
 				return value.f0;
@@ -170,7 +174,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("*->f0")
-	public static class Map9 implements MapFunction<String, Tuple1<String>> {
+	private static class Map9 implements MapFunction<String, Tuple1<String>> {
 		private Tuple1<String> tuple = new Tuple1<String>();
 
 		public Tuple1<String> map(String value) throws Exception {
@@ -185,7 +189,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("*->f0.f0")
-	public static class Map10 implements MapFunction<String, Tuple1<Tuple1<String>>> {
+	private static class Map10 implements MapFunction<String, Tuple1<Tuple1<String>>> {
 		private Tuple1<Tuple1<String>> tuple = new Tuple1<Tuple1<String>>();
 
 		public Tuple1<Tuple1<String>> map(String value) throws Exception {
@@ -201,7 +205,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("*->f1")
-	public static class Map11 implements MapFunction<String, Tuple2<String, String>> {
+	private static class Map11 implements MapFunction<String, Tuple2<String, String>> {
 		private Tuple2<String, String> tuple = new Tuple2<String, String>();
 
 		public Tuple2<String, String> map(String value) throws Exception {
@@ -223,7 +227,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("f0->f0.f0;f0->f1.f0")
-	public static class Map12 implements MapFunction<Tuple2<String, Integer>, Tuple2<Tuple1<String>, Tuple1<String>>> {
+	private static class Map12 implements MapFunction<Tuple2<String, Integer>, Tuple2<Tuple1<String>, Tuple1<String>>> {
 		public Tuple2<Tuple1<String>, Tuple1<String>> map(Tuple2<String, Integer> value) throws Exception {
 			return new Tuple2<Tuple1<String>, Tuple1<String>>(new Tuple1<String>(value.f0), new Tuple1<String>(
 					value.f0));
@@ -237,7 +241,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("f0->f1.f0")
-	public static class Map13 implements MapFunction<Tuple2<String, Integer>, Tuple2<Tuple1<String>, Tuple1<String>>> {
+	private static class Map13 implements MapFunction<Tuple2<String, Integer>, Tuple2<Tuple1<String>, Tuple1<String>>> {
 		@SuppressWarnings("unchecked")
 		public Tuple2<Tuple1<String>, Tuple1<String>> map(Tuple2<String, Integer> value) throws Exception {
 			Tuple2<?, ?> t = new Tuple2<Tuple1<String>, Tuple1<String>>(new Tuple1<String>(value.f0),
@@ -254,7 +258,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("f0")
-	public static class Map14 implements MapFunction<Tuple2<String, Integer>, Tuple2<String, String>> {
+	private static class Map14 implements MapFunction<Tuple2<String, Integer>, Tuple2<String, String>> {
 		public Tuple2<String, String> map(Tuple2<String, Integer> value) throws Exception {
 			Tuple2<String, String> t = new Tuple2<String, String>();
 			t.f0 = value.f0;
@@ -269,7 +273,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("f0.f0->f0")
-	public static class Map15 implements MapFunction<Tuple2<Tuple1<String>, Integer>, Tuple2<String, String>> {
+	private static class Map15 implements MapFunction<Tuple2<Tuple1<String>, Integer>, Tuple2<String, String>> {
 		public Tuple2<String, String> map(Tuple2<Tuple1<String>, Integer> value) throws Exception {
 			Tuple2<String, String> t = new Tuple2<String, String>();
 			t.f0 = value.f0.f0;
@@ -284,7 +288,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("field->field2;field2->field")
-	public static class Map16 implements MapFunction<MyPojo, MyPojo> {
+	private static class Map16 implements MapFunction<MyPojo, MyPojo> {
 		public MyPojo map(MyPojo value) throws Exception {
 			MyPojo p = new MyPojo();
 			p.setField(value.getField2());
@@ -300,7 +304,7 @@ public class UdfAnalyzerTest {
 				"org.apache.flink.api.java.sca.UdfAnalyzerTest$MyPojo<field=String,field2=String>");
 	}
 
-	public static class Map17 implements MapFunction<String, Tuple1<String>> {
+	private static class Map17 implements MapFunction<String, Tuple1<String>> {
 		private Tuple1<String> tuple = new Tuple1<String>();
 
 		public Tuple1<String> map(String value) throws Exception {
@@ -318,7 +322,7 @@ public class UdfAnalyzerTest {
 		compareAnalyzerResultWithAnnotationsSingleInput(MapFunction.class, Map17.class, "String", "Tuple1<String>");
 	}
 
-	public static class Map18 implements MapFunction<Tuple1<String>, ArrayList<String>> {
+	private static class Map18 implements MapFunction<Tuple1<String>, ArrayList<String>> {
 		private ArrayList<String> list = new ArrayList<String>();
 
 		public ArrayList<String> map(Tuple1<String> value) throws Exception {
@@ -334,7 +338,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("*->f0")
-	public static class Map19 implements MapFunction<Integer, Tuple1<Integer>> {
+	private static class Map19 implements MapFunction<Integer, Tuple1<Integer>> {
 		@Override
 		public Tuple1<Integer> map(Integer value) throws Exception {
 			Tuple1<Integer> tuple = new Tuple1<Integer>();
@@ -351,7 +355,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("f0;f1")
-	public static class Map20 implements
+	private static class Map20 implements
 	MapFunction<Tuple4<Integer, Integer, Integer, Integer>, Tuple4<Integer, Integer, Integer, Integer>> {
 
 		@Override
@@ -371,7 +375,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("f0->f1;f1->f0")
-	public static class Map21 implements MapFunction<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> {
+	private static class Map21 implements MapFunction<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> {
 		@Override
 		public Tuple2<Integer, Integer> map(Tuple2<Integer, Integer> value) throws Exception {
 			Integer i = value.f0;
@@ -388,7 +392,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("f0->f1;f1->f0")
-	public static class Map22 implements MapFunction<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> {
+	private static class Map22 implements MapFunction<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> {
 		@Override
 		public Tuple2<Integer, Integer> map(Tuple2<Integer, Integer> value) throws Exception {
 			Tuple2<Integer, Integer> t = new Tuple2<Integer, Integer>();
@@ -405,7 +409,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("*")
-	public static class Map23 implements MapFunction<Tuple1<Integer>, Tuple1<Integer>> {
+	private static class Map23 implements MapFunction<Tuple1<Integer>, Tuple1<Integer>> {
 		@Override
 		public Tuple1<Integer> map(Tuple1<Integer> value) throws Exception {
 			if (value.f0.equals(23)) {
@@ -426,7 +430,7 @@ public class UdfAnalyzerTest {
 				"Tuple1<Integer>");
 	}
 
-	public static class Map24 implements MapFunction<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> {
+	private static class Map24 implements MapFunction<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> {
 		@Override
 		public Tuple2<Integer, Integer> map(Tuple2<Integer, Integer> value) throws Exception {
 			value.setField(2, 0);
@@ -443,7 +447,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("f1->f0;f1")
-	public static class Map25 implements MapFunction<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> {
+	private static class Map25 implements MapFunction<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> {
 		@Override
 		public Tuple2<Integer, Integer> map(Tuple2<Integer, Integer> value) throws Exception {
 			value.f0 = value.f1;
@@ -458,7 +462,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("*->1")
-	public static class Map26 implements MapFunction<Integer, Tuple2<Integer, Integer>> {
+	private static class Map26 implements MapFunction<Integer, Tuple2<Integer, Integer>> {
 
 		@Override
 		public Tuple2<Integer, Integer> map(Integer value) throws Exception {
@@ -483,7 +487,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("2->3;3->7")
-	public static class Map27
+	private static class Map27
 	implements
 	MapFunction<Tuple8<Integer, Integer, Integer, Integer, Integer, Integer, Integer, Integer>,
 	Tuple8<Integer, Integer, Integer, Integer, Integer, Integer, Integer, Integer>> {
@@ -491,8 +495,8 @@ public class UdfAnalyzerTest {
 		public Tuple8<Integer, Integer, Integer, Integer, Integer, Integer, Integer, Integer> map(
 				Tuple8<Integer, Integer, Integer, Integer, Integer, Integer, Integer, Integer> value)
 						throws Exception {
-			Tuple8<Integer, Integer, Integer, Integer, Integer, Integer, Integer, Integer> tuple
-			= new Tuple8<Integer, Integer, Integer, Integer, Integer, Integer, Integer, Integer>();
+			Tuple8<Integer, Integer, Integer, Integer, Integer, Integer, Integer, Integer> tuple =
+				new Tuple8<Integer, Integer, Integer, Integer, Integer, Integer, Integer, Integer>();
 			// non-input content
 			if (tuple.f0 == null) {
 				tuple.setField(123456, 0);
@@ -515,7 +519,7 @@ public class UdfAnalyzerTest {
 				"Tuple8<Integer, Integer, Integer, Integer, Integer, Integer, Integer, Integer>");
 	}
 
-	public static class Map28 implements MapFunction<Integer, Integer> {
+	private static class Map28 implements MapFunction<Integer, Integer> {
 		@Override
 		public Integer map(Integer value) throws Exception {
 			if (value == null) {
@@ -531,7 +535,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("0")
-	public static class Map29 implements MapFunction<Tuple3<String, String, String>, Tuple3<String, String, String>> {
+	private static class Map29 implements MapFunction<Tuple3<String, String, String>, Tuple3<String, String, String>> {
 		@Override
 		public Tuple3<String, String, String> map(Tuple3<String, String, String> value) throws Exception {
 			String tmp = value.f0;
@@ -554,7 +558,7 @@ public class UdfAnalyzerTest {
 				"Tuple3<String, String, String>", "Tuple3<String, String, String>");
 	}
 
-	public static class Map30 implements MapFunction<Tuple2<String, String>, String> {
+	private static class Map30 implements MapFunction<Tuple2<String, String>, String> {
 		@Override
 		public String map(Tuple2<String, String> value) throws Exception {
 			String tmp;
@@ -574,7 +578,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("1->1;1->0")
-	public static class Map31 implements MapFunction<Tuple2<String, String>, ExtendingTuple> {
+	private static class Map31 implements MapFunction<Tuple2<String, String>, ExtendingTuple> {
 		@Override
 		public ExtendingTuple map(Tuple2<String, String> value) throws Exception {
 			ExtendingTuple t = new ExtendingTuple();
@@ -592,7 +596,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("*")
-	public static class Map32
+	private static class Map32
 	implements
 	MapFunction<Tuple8<Boolean, Character, Byte, Short, Integer, Long, Float, Double>,
 	Tuple8<Boolean, Character, Byte, Short, Integer, Long, Float, Double>> {
@@ -620,7 +624,7 @@ public class UdfAnalyzerTest {
 				"Tuple8<Boolean, Character, Byte, Short, Integer, Long, Float, Double>");
 	}
 
-	public static class Map33 implements MapFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {
+	private static class Map33 implements MapFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {
 		@Override
 		public Tuple2<Long, Long> map(Tuple2<Long, Long> value) throws Exception {
 			Tuple2<Long, Long> t = new Tuple2<Long, Long>();
@@ -641,7 +645,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("1")
-	public static class Map34 implements MapFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {
+	private static class Map34 implements MapFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {
 		private Tuple2<Long, Long> t;
 		@Override
 		public Tuple2<Long, Long> map(Tuple2<Long, Long> value) throws Exception {
@@ -663,7 +667,7 @@ public class UdfAnalyzerTest {
 				"Tuple2<Long, Long>");
 	}
 
-	public static class Map35 implements MapFunction<String[], Tuple2<String[], String[]>> {
+	private static class Map35 implements MapFunction<String[], Tuple2<String[], String[]>> {
 		@Override
 		public Tuple2<String[], String[]> map(String[] value) throws Exception {
 			String[] tmp = value;
@@ -678,7 +682,7 @@ public class UdfAnalyzerTest {
 				"Tuple2<String[], String[]>");
 	}
 
-	public static class Map36 implements MapFunction<Tuple3<String, String, String>, Tuple3<String, String, String>> {
+	private static class Map36 implements MapFunction<Tuple3<String, String, String>, Tuple3<String, String, String>> {
 		@Override
 		public Tuple3<String, String, String> map(Tuple3<String, String, String> value) throws Exception {
 			int i = 0;
@@ -696,7 +700,7 @@ public class UdfAnalyzerTest {
 				"Tuple3<String, String, String>");
 	}
 
-	public static class Map37 implements MapFunction<Tuple1<Tuple1<String>>, Tuple1<Tuple1<String>>> {
+	private static class Map37 implements MapFunction<Tuple1<Tuple1<String>>, Tuple1<Tuple1<String>>> {
 		@SuppressWarnings("unchecked")
 		@Override
 		public Tuple1<Tuple1<String>> map(Tuple1<Tuple1<String>> value) throws Exception {
@@ -712,7 +716,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("field")
-	public static class Map38 implements MapFunction<MyPojo2, MyPojo2> {
+	private static class Map38 implements MapFunction<MyPojo2, MyPojo2> {
 		@Override
 		public MyPojo2 map(MyPojo2 value) throws Exception {
 			value.setField2("test");
@@ -727,7 +731,7 @@ public class UdfAnalyzerTest {
 				"org.apache.flink.api.java.sca.UdfAnalyzerTest$MyPojo2<field=String,field2=String>");
 	}
 
-	public static class Map39 implements MapFunction<MyPojo, MyPojo> {
+	private static class Map39 implements MapFunction<MyPojo, MyPojo> {
 		@Override
 		public MyPojo map(MyPojo value) throws Exception {
 			MyPojo mp = new MyPojo();
@@ -744,7 +748,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("field2")
-	public static class Map40 implements MapFunction<MyPojo, MyPojo> {
+	private static class Map40 implements MapFunction<MyPojo, MyPojo> {
 		@Override
 		public MyPojo map(MyPojo value) throws Exception {
 			return recursiveFunction(value);
@@ -767,7 +771,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("field;field2")
-	public static class Map41 extends RichMapFunction<MyPojo, MyPojo> {
+	private static class Map41 extends RichMapFunction<MyPojo, MyPojo> {
 		private MyPojo field;
 		@Override
 		public MyPojo map(MyPojo value) throws Exception {
@@ -785,7 +789,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("*")
-	public static class FlatMap1 implements FlatMapFunction<Tuple1<Integer>, Tuple1<Integer>> {
+	private static class FlatMap1 implements FlatMapFunction<Tuple1<Integer>, Tuple1<Integer>> {
 		@Override
 		public void flatMap(Tuple1<Integer> value, Collector<Tuple1<Integer>> out) throws Exception {
 			out.collect(value);
@@ -799,7 +803,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("0->1;1->0")
-	public static class FlatMap2 implements FlatMapFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {
+	private static class FlatMap2 implements FlatMapFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {
 		Tuple2<Long, Long> invertedEdge = new Tuple2<Long, Long>();
 
 		@Override
@@ -817,7 +821,7 @@ public class UdfAnalyzerTest {
 				"Tuple2<Long, Long>");
 	}
 
-	public static class FlatMap3 implements FlatMapFunction<Tuple1<Integer>, Tuple1<Integer>> {
+	private static class FlatMap3 implements FlatMapFunction<Tuple1<Integer>, Tuple1<Integer>> {
 		@Override
 		public void flatMap(Tuple1<Integer> value, Collector<Tuple1<Integer>> out) throws Exception {
 			addToCollector(out);
@@ -837,7 +841,7 @@ public class UdfAnalyzerTest {
 
 	@ForwardedFieldsFirst("f1->f1")
 	@ForwardedFieldsSecond("f1->f0")
-	public static class Join1 implements JoinFunction<Tuple2<Long, Long>, Tuple2<Long, Long>, Tuple2<Long, Long>> {
+	private static class Join1 implements JoinFunction<Tuple2<Long, Long>, Tuple2<Long, Long>, Tuple2<Long, Long>> {
 		@Override
 		public Tuple2<Long, Long> join(Tuple2<Long, Long> vertexWithComponent, Tuple2<Long, Long> edge) {
 			return new Tuple2<Long, Long>(edge.f1, vertexWithComponent.f1);
@@ -851,7 +855,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFieldsFirst("*")
-	public static class Join2 implements FlatJoinFunction<Tuple2<Long, Long>, Tuple2<Long, Long>, Tuple2<Long, Long>> {
+	private static class Join2 implements FlatJoinFunction<Tuple2<Long, Long>, Tuple2<Long, Long>, Tuple2<Long, Long>> {
 		@Override
 		public void join(Tuple2<Long, Long> candidate, Tuple2<Long, Long> old, Collector<Tuple2<Long, Long>> out) {
 			if (candidate.f1 < old.f1) {
@@ -867,7 +871,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("0")
-	public static class GroupReduce1 implements GroupReduceFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {
+	private static class GroupReduce1 implements GroupReduceFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {
 		@Override
 		public void reduce(Iterable<Tuple2<Long, Long>> values, Collector<Tuple2<Long, Long>> out) throws Exception {
 			out.collect(values.iterator().next());
@@ -881,7 +885,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("1->0")
-	public static class GroupReduce2 implements GroupReduceFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {
+	private static class GroupReduce2 implements GroupReduceFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {
 		@Override
 		public void reduce(Iterable<Tuple2<Long, Long>> values, Collector<Tuple2<Long, Long>> out) throws Exception {
 			final Iterator<Tuple2<Long, Long>> it = values.iterator();
@@ -907,7 +911,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("field2")
-	public static class GroupReduce3 implements GroupReduceFunction<MyPojo, MyPojo> {
+	private static class GroupReduce3 implements GroupReduceFunction<MyPojo, MyPojo> {
 		@Override
 		public void reduce(Iterable<MyPojo> values, Collector<MyPojo> out) throws Exception {
 			for (MyPojo value : values) {
@@ -924,7 +928,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("f0->*")
-	public static class GroupReduce4 implements GroupReduceFunction<Tuple2<Long, Long>, Long> {
+	private static class GroupReduce4 implements GroupReduceFunction<Tuple2<Long, Long>, Long> {
 		@Override
 		public void reduce(Iterable<Tuple2<Long, Long>> values, Collector<Long> out) throws Exception {
 			Long id = 0L;
@@ -942,7 +946,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("f0->*")
-	public static class GroupReduce4_Javac implements GroupReduceFunction<Tuple2<Long, Long>, Long> {
+	private static class GroupReduce4Javac implements GroupReduceFunction<Tuple2<Long, Long>, Long> {
 		@SuppressWarnings("unchecked")
 		@Override
 		public void reduce(Iterable<Tuple2<Long, Long>> values, Collector<Long> out) throws Exception {
@@ -962,11 +966,11 @@ public class UdfAnalyzerTest {
 	@Test
 	public void testForwardWithAtLeastOneIterationAssumptionForJavac() {
 		// this test simulates javac behaviour in Eclipse IDE
-		compareAnalyzerResultWithAnnotationsSingleInputWithKeys(GroupReduceFunction.class, GroupReduce4_Javac.class,
+		compareAnalyzerResultWithAnnotationsSingleInputWithKeys(GroupReduceFunction.class, GroupReduce4Javac.class,
 				"Tuple2<Long, Long>", "Long", new String[] { "f0" });
 	}
 
-	public static class GroupReduce5 implements GroupReduceFunction<Tuple2<Long, Long>, Long> {
+	private static class GroupReduce5 implements GroupReduceFunction<Tuple2<Long, Long>, Long> {
 		@Override
 		public void reduce(Iterable<Tuple2<Long, Long>> values, Collector<Long> out) throws Exception {
 			Long id = 0L;
@@ -986,7 +990,7 @@ public class UdfAnalyzerTest {
 				"Tuple2<Long, Long>", "Long", new String[] { "f1" });
 	}
 
-	public static class GroupReduce6 implements GroupReduceFunction<Tuple2<Long, Long>, Long> {
+	private static class GroupReduce6 implements GroupReduceFunction<Tuple2<Long, Long>, Long> {
 		@Override
 		public void reduce(Iterable<Tuple2<Long, Long>> values, Collector<Long> out) throws Exception {
 			Long id = 0L;
@@ -1004,7 +1008,7 @@ public class UdfAnalyzerTest {
 				"Tuple2<Long, Long>", "Long", new String[] { "f0" });
 	}
 
-	public static class GroupReduce7 implements GroupReduceFunction<Tuple2<Long, Long>, Long> {
+	private static class GroupReduce7 implements GroupReduceFunction<Tuple2<Long, Long>, Long> {
 		@Override
 		public void reduce(Iterable<Tuple2<Long, Long>> values, Collector<Long> out) throws Exception {
 			Long id = 0L;
@@ -1023,7 +1027,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("f0->*")
-	public static class GroupReduce8 implements GroupReduceFunction<Tuple2<Long, Long>, Long> {
+	private static class GroupReduce8 implements GroupReduceFunction<Tuple2<Long, Long>, Long> {
 		@Override
 		public void reduce(Iterable<Tuple2<Long, Long>> values, Collector<Long> out) throws Exception {
 			Long id = 0L;
@@ -1042,7 +1046,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("f0")
-	public static class GroupReduce9 implements GroupReduceFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {
+	private static class GroupReduce9 implements GroupReduceFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {
 		@Override
 		public void reduce(Iterable<Tuple2<Long, Long>> values, Collector<Tuple2<Long, Long>> out) throws Exception {
 			Tuple2<Long, Long> rv = null;
@@ -1060,7 +1064,7 @@ public class UdfAnalyzerTest {
 				"Tuple2<Long, Long>", "Tuple2<Long, Long>", new String[] { "f0" });
 	}
 
-	public static class GroupReduce10 implements GroupReduceFunction<Tuple2<Long, Long>, Boolean> {
+	private static class GroupReduce10 implements GroupReduceFunction<Tuple2<Long, Long>, Boolean> {
 		@Override
 		public void reduce(Iterable<Tuple2<Long, Long>> values, Collector<Boolean> out) throws Exception {
 			Iterator<Tuple2<Long, Long>> it = values.iterator();
@@ -1082,7 +1086,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("field")
-	public static class Reduce1 implements ReduceFunction<MyPojo> {
+	private static class Reduce1 implements ReduceFunction<MyPojo> {
 		@Override
 		public MyPojo reduce(MyPojo value1, MyPojo value2) throws Exception {
 			return new MyPojo(value1.getField(), value2.getField2());
@@ -1098,7 +1102,7 @@ public class UdfAnalyzerTest {
 	}
 
 	@ForwardedFields("field")
-	public static class Reduce2 implements ReduceFunction<MyPojo> {
+	private static class Reduce2 implements ReduceFunction<MyPojo> {
 		@Override
 		public MyPojo reduce(MyPojo value1, MyPojo value2) throws Exception {
 			if (value1.field != null && value1.field.isEmpty()) {
@@ -1116,14 +1120,14 @@ public class UdfAnalyzerTest {
 				new String[] { "field" });
 	}
 
-	public static class NullReturnMapper1 implements MapFunction<String, String> {
+	private static class NullReturnMapper1 implements MapFunction<String, String> {
 		@Override
 		public String map(String value) throws Exception {
 			return null;
 		}
 	}
 
-	public static class NullReturnMapper2 implements MapFunction<String, String> {
+	private static class NullReturnMapper2 implements MapFunction<String, String> {
 		@Override
 		public String map(String value) throws Exception {
 			if (value.equals("test")) {
@@ -1133,7 +1137,7 @@ public class UdfAnalyzerTest {
 		}
 	}
 
-	public static class NullReturnFlatMapper implements FlatMapFunction<String, String> {
+	private static class NullReturnFlatMapper implements FlatMapFunction<String, String> {
 		@Override
 		public void flatMap(String value, Collector<String> out) throws Exception {
 			String s = null;
@@ -1175,8 +1179,7 @@ public class UdfAnalyzerTest {
 		}
 	}
 
-
-	public static class PutStaticMapper implements MapFunction<String, String> {
+	private static class PutStaticMapper implements MapFunction<String, String> {
 		public static String test = "";
 
 		@Override
@@ -1199,7 +1202,7 @@ public class UdfAnalyzerTest {
 		}
 	}
 
-	public static class FilterMod1 implements FilterFunction<Tuple2<String, String>> {
+	private static class FilterMod1 implements FilterFunction<Tuple2<String, String>> {
 
 		@Override
 		public boolean filter(Tuple2<String, String> value) throws Exception {
@@ -1221,7 +1224,7 @@ public class UdfAnalyzerTest {
 		}
 	}
 
-	public static class FilterMod2 implements FilterFunction<Tuple2<String, String>> {
+	private static class FilterMod2 implements FilterFunction<Tuple2<String, String>> {
 
 		@Override
 		public boolean filter(Tuple2<String, String> value) throws Exception {
@@ -1247,6 +1250,9 @@ public class UdfAnalyzerTest {
 	// Utils
 	// --------------------------------------------------------------------------------------------
 
+	/**
+	 * Simple POJO with two fields.
+	 */
 	public static class MyPojo {
 		private String field;
 		public String field2;
@@ -1277,6 +1283,9 @@ public class UdfAnalyzerTest {
 		}
 	}
 
+	/**
+	 * Simple POJO extending {@link MyPojo}.
+	 */
 	public static class MyPojo2 extends MyPojo {
 
 		public MyPojo2() {
@@ -1284,7 +1293,7 @@ public class UdfAnalyzerTest {
 		}
 	}
 
-	public static class ExtendingTuple extends Tuple2<String, String> {
+	private static class ExtendingTuple extends Tuple2<String, String> {
 		public void setFirstField() {
 			setField("Hello", 0);
 		}

http://git-wip-us.apache.org/repos/asf/flink/blob/53d6582d/tools/maven/suppressions-java.xml
----------------------------------------------------------------------
diff --git a/tools/maven/suppressions-java.xml b/tools/maven/suppressions-java.xml
index 9ddacba..4f13e3f 100644
--- a/tools/maven/suppressions-java.xml
+++ b/tools/maven/suppressions-java.xml
@@ -32,10 +32,6 @@ under the License.
 		checks="UnusedImports|AvoidStarImport"/>
 
 	<suppress
-		files="(.*)api[/\\]java[/\\]sca[/\\](.*)"
-		checks="NewlineAtEndOfFile|RegexpSingleline|TodoComment|RedundantImport|ImportOrder|RedundantModifier|JavadocMethod|JavadocParagraph|JavadocType|JavadocStyle|PackageName|TypeNameCheck|ConstantNameCheck|StaticVariableNameCheck|MemberNameCheck|MethodNameCheck|ParameterName|LocalFinalVariableName|LocalVariableName|LeftCurly|UpperEll|FallThrough|reliefPattern|SimplifyBooleanExpression|EmptyStatement|ModifierOrder|EmptyLineSeparator|WhitespaceAround|WhitespaceAfter|NoWhitespaceAfter|NoWhitespaceBefore|OperatorWrap|ParenPad"/>
-
-	<suppress
 		files="(.*)api[/\\]java[/\\]summarize[/\\](.*)"
 		checks="NewlineAtEndOfFile|RegexpSingleline|TodoComment|RedundantImport|ImportOrder|RedundantModifier|JavadocMethod|JavadocParagraph|JavadocType|JavadocStyle|PackageName|TypeNameCheck|ConstantNameCheck|StaticVariableNameCheck|MemberNameCheck|MethodNameCheck|ParameterName|LocalFinalVariableName|LocalVariableName|LeftCurly|UpperEll|FallThrough|reliefPattern|SimplifyBooleanExpression|EmptyStatement|ModifierOrder|EmptyLineSeparator|WhitespaceAround|WhitespaceAfter|NoWhitespaceAfter|NoWhitespaceBefore|OperatorWrap|ParenPad"/>
 	<!--Only additional checks for test sources. Those checks were present in the "pre-strict" checkstyle but were not applied to test sources. We do not want to suppress them for sources directory-->


[4/6] flink git commit: [FLINK-7181] Activate checkstyle flink-java/operators/*

Posted by dw...@apache.org.
http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/DataSink.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/DataSink.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/DataSink.java
index ee5ab2e..bac85ee 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/DataSink.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/DataSink.java
@@ -33,23 +33,27 @@ import org.apache.flink.api.common.operators.ResourceSpec;
 import org.apache.flink.api.common.operators.UnaryOperatorInformation;
 import org.apache.flink.api.common.typeinfo.NothingTypeInfo;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
-import org.apache.flink.configuration.Configuration;
 import org.apache.flink.api.java.DataSet;
+import org.apache.flink.configuration.Configuration;
 import org.apache.flink.util.Preconditions;
 
 import java.util.Arrays;
 
+/**
+ * An operation that allows storing data results.
+ * @param <T> The type of the elements that are written by this data sink.
+ */
 @Public
 public class DataSink<T> {
-	
+
 	private final OutputFormat<T> format;
-	
+
 	private final TypeInformation<T> type;
-	
+
 	private final DataSet<T> data;
-	
+
 	private String name;
-	
+
 	private int parallelism = ExecutionConfig.PARALLELISM_DEFAULT;
 
 	private ResourceSpec minResources = ResourceSpec.DEFAULT;
@@ -72,14 +76,12 @@ public class DataSink<T> {
 		if (data == null) {
 			throw new IllegalArgumentException("The data set must not be null.");
 		}
-		
-		
+
 		this.format = format;
 		this.data = data;
 		this.type = type;
 	}
 
-
 	@Internal
 	public OutputFormat<T> getFormat() {
 		return format;
@@ -96,7 +98,7 @@ public class DataSink<T> {
 	}
 
 	/**
-	 * Pass a configuration to the OutputFormat
+	 * Pass a configuration to the OutputFormat.
 	 * @param parameters Configuration parameters
 	 */
 	public DataSink<T> withParameters(Configuration parameters) {
@@ -106,9 +108,11 @@ public class DataSink<T> {
 
 	/**
 	 * Sorts each local partition of a {@link org.apache.flink.api.java.tuple.Tuple} data set
-	 * on the specified field in the specified {@link Order} before it is emitted by the output format.<br>
-	 * <b>Note: Only tuple data sets can be sorted using integer field indices.</b><br>
-	 * The tuple data set can be sorted on multiple fields in different orders
+	 * on the specified field in the specified {@link Order} before it is emitted by the output format.
+	 *
+	 * <p><b>Note: Only tuple data sets can be sorted using integer field indices.</b>
+	 *
+	 * <p>The tuple data set can be sorted on multiple fields in different orders
 	 * by chaining {@link #sortLocalOutput(int, Order)} calls.
 	 *
 	 * @param field The Tuple field on which the data set is locally sorted.
@@ -132,7 +136,7 @@ public class DataSink<T> {
 			throw new InvalidProgramException("Selected sort key is not a sortable type");
 		}
 
-		if(this.sortKeyPositions == null) {
+		if (this.sortKeyPositions == null) {
 			// set sorting info
 			this.sortKeyPositions = flatKeys;
 			this.sortOrders = new Order[flatKeys.length];
@@ -144,9 +148,9 @@ public class DataSink<T> {
 			this.sortKeyPositions = Arrays.copyOf(this.sortKeyPositions, newLength);
 			this.sortOrders = Arrays.copyOf(this.sortOrders, newLength);
 
-			for(int i=0; i<flatKeys.length; i++) {
-				this.sortKeyPositions[oldLength+i] = flatKeys[i];
-				this.sortOrders[oldLength+i] = order;
+			for (int i = 0; i < flatKeys.length; i++) {
+				this.sortKeyPositions[oldLength + i] = flatKeys[i];
+				this.sortOrders[oldLength + i] = order;
 			}
 		}
 
@@ -155,10 +159,12 @@ public class DataSink<T> {
 
 	/**
 	 * Sorts each local partition of a data set on the field(s) specified by the field expression
-	 * in the specified {@link Order} before it is emitted by the output format.<br>
-	 * <b>Note: Non-composite types can only be sorted on the full element which is specified by
-	 * a wildcard expression ("*" or "_").</b><br>
-	 * Data sets of composite types (Tuple or Pojo) can be sorted on multiple fields in different orders
+	 * in the specified {@link Order} before it is emitted by the output format.
+	 *
+	 * <p><b>Note: Non-composite types can only be sorted on the full element which is specified by
+	 * a wildcard expression ("*" or "_").</b>
+	 *
+	 * <p>Data sets of composite types (Tuple or Pojo) can be sorted on multiple fields in different orders
 	 * by chaining {@link #sortLocalOutput(String, Order)} calls.
 	 *
 	 * @param fieldExpression The field expression for the field(s) on which the data set is locally sorted.
@@ -189,7 +195,7 @@ public class DataSink<T> {
 		orders = new Order[numFields];
 		Arrays.fill(orders, order);
 
-		if(this.sortKeyPositions == null) {
+		if (this.sortKeyPositions == null) {
 			// set sorting info
 			this.sortKeyPositions = fields;
 			this.sortOrders = orders;
@@ -199,9 +205,9 @@ public class DataSink<T> {
 			int newLength = oldLength + numFields;
 			this.sortKeyPositions = Arrays.copyOf(this.sortKeyPositions, newLength);
 			this.sortOrders = Arrays.copyOf(this.sortOrders, newLength);
-			for(int i=0; i<numFields; i++) {
-				this.sortKeyPositions[oldLength+i] = fields[i];
-				this.sortOrders[oldLength+i] = orders[i];
+			for (int i = 0; i < numFields; i++) {
+				this.sortKeyPositions[oldLength + i] = fields[i];
+				this.sortOrders[oldLength + i] = orders[i];
 			}
 		}
 
@@ -214,16 +220,16 @@ public class DataSink<T> {
 	public Configuration getParameters() {
 		return this.parameters;
 	}
-	
+
 	// --------------------------------------------------------------------------------------------
-	
+
 	public DataSink<T> name(String name) {
 		this.name = name;
 		return this;
 	}
-	
+
 	// --------------------------------------------------------------------------------------------
-	
+
 	protected GenericDataSinkBase<T> translateToDataFlow(Operator<T> input) {
 		// select the name (or create a default one)
 		String name = this.name != null ? this.name : this.format.toString();
@@ -231,11 +237,11 @@ public class DataSink<T> {
 		// set input
 		sink.setInput(input);
 		// set parameters
-		if(this.parameters != null) {
+		if (this.parameters != null) {
 			sink.getParameters().addAll(this.parameters);
 		}
 		// set parallelism
-		if(this.parallelism > 0) {
+		if (this.parallelism > 0) {
 			// use specified parallelism
 			sink.setParallelism(this.parallelism);
 		} else {
@@ -243,34 +249,34 @@ public class DataSink<T> {
 			sink.setParallelism(input.getParallelism());
 		}
 
-		if(this.sortKeyPositions != null) {
+		if (this.sortKeyPositions != null) {
 			// configure output sorting
 			Ordering ordering = new Ordering();
-			for(int i=0; i<this.sortKeyPositions.length; i++) {
+			for (int i = 0; i < this.sortKeyPositions.length; i++) {
 				ordering.appendOrdering(this.sortKeyPositions[i], null, this.sortOrders[i]);
 			}
 			sink.setLocalOrder(ordering);
 		}
-		
+
 		return sink;
 	}
-	
+
 	// --------------------------------------------------------------------------------------------
-	
+
 	@Override
 	public String toString() {
 		return "DataSink '" + (this.name == null ? "<unnamed>" : this.name) + "' (" + this.format.toString() + ")";
 	}
-	
+
 	/**
 	 * Returns the parallelism of this data sink.
-	 * 
+	 *
 	 * @return The parallelism of this data sink.
 	 */
 	public int getParallelism() {
 		return this.parallelism;
 	}
-	
+
 	/**
 	 * Sets the parallelism for this data sink.
 	 * The degree must be 1 or more.

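The sortLocalOutput javadoc above describes chaining calls to sort each local partition on several fields before the format emits them. A minimal sketch of that usage, assuming the DataSet API as of this commit (class name and output path are illustrative):

	import org.apache.flink.api.common.operators.Order;
	import org.apache.flink.api.java.DataSet;
	import org.apache.flink.api.java.ExecutionEnvironment;
	import org.apache.flink.api.java.tuple.Tuple2;

	public class SortLocalOutputSketch {
		public static void main(String[] args) throws Exception {
			ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
			DataSet<Tuple2<String, Integer>> data = env.fromElements(
				Tuple2.of("b", 2), Tuple2.of("a", 1), Tuple2.of("a", 3));

			// Each chained call appends a sort key: every local partition is
			// sorted on field 0 ascending, then field 1 descending, before the
			// output format writes it.
			data.writeAsText("/tmp/sorted-output")
				.sortLocalOutput(0, Order.ASCENDING)
				.sortLocalOutput(1, Order.DESCENDING);

			env.execute("sortLocalOutput sketch");
		}
	}
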
http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/DataSource.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/DataSource.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/DataSource.java
index af6f65b..8ae1c7d 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/DataSource.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/DataSource.java
@@ -34,7 +34,7 @@ import org.apache.flink.configuration.Configuration;
  * An operation that creates a new data set (data source). The operation acts as the
  * data set on which to apply further transformations. It encapsulates additional
  * configuration parameters, to customize the execution.
- * 
+ *
  * @param <OUT> The type of the elements produced by this data source.
  */
 @Public
@@ -49,25 +49,25 @@ public class DataSource<OUT> extends Operator<OUT, DataSource<OUT>> {
 	private SplitDataProperties<OUT> splitDataProperties;
 
 	// --------------------------------------------------------------------------------------------
-	
+
 	/**
 	 * Creates a new data source.
-	 * 
+	 *
 	 * @param context The environment in which the data source gets executed.
 	 * @param inputFormat The input format that the data source executes.
 	 * @param type The type of the elements produced by this input format.
 	 */
 	public DataSource(ExecutionEnvironment context, InputFormat<OUT, ?> inputFormat, TypeInformation<OUT> type, String dataSourceLocationName) {
 		super(context, type);
-		
+
 		this.dataSourceLocationName = dataSourceLocationName;
-		
+
 		if (inputFormat == null) {
 			throw new IllegalArgumentException("The input format may not be null.");
 		}
-		
+
 		this.inputFormat = inputFormat;
-		
+
 		if (inputFormat instanceof NonParallelInput) {
 			this.parallelism = 1;
 		}
@@ -75,23 +75,23 @@ public class DataSource<OUT> extends Operator<OUT, DataSource<OUT>> {
 
 	/**
 	 * Gets the input format that is executed by this data source.
-	 * 
+	 *
 	 * @return The input format that is executed by this data source.
 	 */
 	@Internal
 	public InputFormat<OUT, ?> getInputFormat() {
 		return this.inputFormat;
 	}
-	
+
 	/**
-	 * Pass a configuration to the InputFormat
+	 * Pass a configuration to the InputFormat.
 	 * @param parameters Configuration parameters
 	 */
 	public DataSource<OUT> withParameters(Configuration parameters) {
 		this.parameters = parameters;
 		return this;
 	}
-	
+
 	/**
 	 * @return Configuration for the InputFormat.
 	 */
@@ -99,15 +99,15 @@ public class DataSource<OUT> extends Operator<OUT, DataSource<OUT>> {
 		return this.parameters;
 	}
 
-
 	/**
 	 * Returns the {@link org.apache.flink.api.java.io.SplitDataProperties} for the
 	 * {@link org.apache.flink.core.io.InputSplit}s of this DataSource
 	 * for configurations.
 	 *
-	 * SplitDataProperties can help to generate more efficient execution plans.
-	 * <br>
-	 * <b>
+	 * <p>SplitDataProperties can help to generate more efficient execution plans.
+	 *
+	 * <p><b>
 	 *     IMPORTANT: Incorrect configuration of SplitDataProperties can cause wrong results!
 	 * </b>
 	 *
@@ -115,28 +115,28 @@ public class DataSource<OUT> extends Operator<OUT, DataSource<OUT>> {
 	 */
 	@PublicEvolving
 	public SplitDataProperties<OUT> getSplitDataProperties() {
-		if(this.splitDataProperties == null) {
+		if (this.splitDataProperties == null) {
 			this.splitDataProperties = new SplitDataProperties<OUT>(this);
 		}
 		return this.splitDataProperties;
 	}
 
 	// --------------------------------------------------------------------------------------------
-	
+
 	protected GenericDataSourceBase<OUT, ?> translateToDataFlow() {
-		String name = this.name != null ? this.name : "at "+dataSourceLocationName+" ("+inputFormat.getClass().getName()+")";
+		String name = this.name != null ? this.name : "at " + dataSourceLocationName + " (" + inputFormat.getClass().getName() + ")";
 		if (name.length() > 150) {
 			name = name.substring(0, 150);
 		}
-		
-		@SuppressWarnings({ "unchecked", "rawtypes" })
+
+		@SuppressWarnings({"unchecked", "rawtypes"})
 		GenericDataSourceBase<OUT, ?> source = new GenericDataSourceBase(this.inputFormat,
-				new OperatorInformation<OUT>(getType()), name);
+			new OperatorInformation<OUT>(getType()), name);
 		source.setParallelism(parallelism);
-		if(this.parameters != null) {
+		if (this.parameters != null) {
 			source.getParameters().addAll(this.parameters);
 		}
-		if(this.splitDataProperties != null) {
+		if (this.splitDataProperties != null) {
 			source.setSplitDataProperties(this.splitDataProperties);
 		}
 		return source;

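DataSource#withParameters, touched above, hands a Configuration to the input format when the job runs. A short sketch (statements assumed inside a main method; the path is illustrative, while recursive.file.enumeration is the standard FileInputFormat option):

	import org.apache.flink.api.java.ExecutionEnvironment;
	import org.apache.flink.api.java.operators.DataSource;
	import org.apache.flink.configuration.Configuration;

	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	Configuration parameters = new Configuration();
	// Ask the file input format to also enumerate nested directories.
	parameters.setBoolean("recursive.file.enumeration", true);

	DataSource<String> source = env.readTextFile("/tmp/input")
		.withParameters(parameters);
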
http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/DeltaIteration.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/DeltaIteration.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/DeltaIteration.java
index 61f83b1..dc80e70 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/DeltaIteration.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/DeltaIteration.java
@@ -18,10 +18,8 @@
 
 package org.apache.flink.api.java.operators;
 
-import java.util.Arrays;
-
-import org.apache.flink.annotation.PublicEvolving;
 import org.apache.flink.annotation.Public;
+import org.apache.flink.annotation.PublicEvolving;
 import org.apache.flink.api.common.ExecutionConfig;
 import org.apache.flink.api.common.InvalidProgramException;
 import org.apache.flink.api.common.aggregators.Aggregator;
@@ -35,10 +33,12 @@ import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.types.Value;
 import org.apache.flink.util.Preconditions;
 
+import java.util.Arrays;
+
 /**
  * The DeltaIteration represents the start of a delta iteration. It is created from the DataSet that
  * represents the initial solution set via the {@link DataSet#iterateDelta(DataSet, int, int...)} method.
- * 
+ *
  * @param <ST> The data type of the solution set.
  * @param <WT> The data type of the workset (the feedback data set).
  *
@@ -47,27 +47,27 @@ import org.apache.flink.util.Preconditions;
  */
 @Public
 public class DeltaIteration<ST, WT> {
-	
+
 	private final AggregatorRegistry aggregators = new AggregatorRegistry();
-	
+
 	private final DataSet<ST> initialSolutionSet;
 	private final DataSet<WT> initialWorkset;
-	
+
 	private final SolutionSetPlaceHolder<ST> solutionSetPlaceholder;
 	private final WorksetPlaceHolder<WT> worksetPlaceholder;
 
 	private final Keys<ST> keys;
-	
+
 	private final int maxIterations;
-	
+
 	private String name;
-	
+
 	private int parallelism = ExecutionConfig.PARALLELISM_DEFAULT;
 
 	private ResourceSpec minResources = ResourceSpec.DEFAULT;
 
 	private ResourceSpec preferredResources = ResourceSpec.DEFAULT;
-	
+
 	private boolean solutionSetUnManaged;
 
 	public DeltaIteration(ExecutionEnvironment context, TypeInformation<ST> type, DataSet<ST> solutionSet, DataSet<WT> workset, Keys<ST> keys, int maxIterations) {
@@ -80,15 +80,15 @@ public class DeltaIteration<ST, WT> {
 	}
 
 	// --------------------------------------------------------------------------------------------
-	
+
 	/**
 	 * Closes the delta iteration. This method defines the end of the delta iteration's function.
-	 * 
+	 *
 	 * @param solutionSetDelta The delta for the solution set. The delta will be merged into the solution set at the end of
 	 *                         each iteration.
 	 * @param newWorkset The new workset (feedback data set) that will be fed back to the next iteration.
 	 * @return The DataSet that represents the result of the iteration, after the computation has terminated.
-	 * 
+	 *
 	 * @see DataSet#iterateDelta(DataSet, int, int...)
 	 */
 	public DataSet<ST> closeWith(DataSet<ST> solutionSetDelta, DataSet<WT> newWorkset) {
@@ -98,18 +98,18 @@ public class DeltaIteration<ST, WT> {
 
 	/**
 	 * Gets the initial solution set. This is the data set on which the delta iteration was started.
-	 * <p>
-	 * Consider the following example:
+	 *
+	 * <p>Consider the following example:
 	 * <pre>
 	 * {@code
 	 * DataSet<MyType> solutionSetData = ...;
 	 * DataSet<AnotherType> worksetData = ...;
-	 * 
+	 *
 	 * DeltaIteration<MyType, AnotherType> iteration = solutionSetData.iteratorDelta(worksetData, 10, ...);
 	 * }
 	 * </pre>
 	 * The <tt>solutionSetData</tt> would be the data set returned by {@code iteration.getInitialSolutionSet();}.
-	 * 
+	 *
 	 * @return The data set that forms the initial solution set.
 	 */
 	public DataSet<ST> getInitialSolutionSet() {
@@ -119,18 +119,18 @@ public class DeltaIteration<ST, WT> {
 	/**
 	 * Gets the initial workset. This is the data set passed to the method that starts the delta
 	 * iteration.
-	 * <p>
-	 * Consider the following example:
+	 *
+	 * <p>Consider the following example:
 	 * <pre>
 	 * {@code
 	 * DataSet<MyType> solutionSetData = ...;
 	 * DataSet<AnotherType> worksetData = ...;
-	 * 
+	 *
 	 * DeltaIteration<MyType, AnotherType> iteration = solutionSetData.iteratorDelta(worksetData, 10, ...);
 	 * }
 	 * </pre>
 	 * The <tt>worksetData</tt> would be the data set returned by {@code iteration.getInitialWorkset();}.
-	 * 
+	 *
 	 * @return The data set that forms the initial workset.
 	 */
 	public DataSet<WT> getInitialWorkset() {
@@ -139,7 +139,7 @@ public class DeltaIteration<ST, WT> {
 
 	/**
 	 * Gets the solution set of the delta iteration. The solution set represents the state that is kept across iterations.
-	 * 
+	 *
 	 * @return The solution set of the delta iteration.
 	 */
 	public SolutionSetPlaceHolder<ST> getSolutionSet() {
@@ -157,7 +157,7 @@ public class DeltaIteration<ST, WT> {
 
 	/**
 	 * Sets the name for the iteration. The name is displayed in logs and messages.
-	 * 
+	 *
 	 * @param name The name for the iteration.
 	 * @return The iteration object, for function call chaining.
 	 */
@@ -165,16 +165,16 @@ public class DeltaIteration<ST, WT> {
 		this.name = name;
 		return this;
 	}
-	
+
 	/**
 	 * Gets the name from this iteration.
-	 * 
+	 *
 	 * @return The name of the iteration.
 	 */
 	public String getName() {
 		return name;
 	}
-	
+
 	/**
 	 * Sets the parallelism for the iteration.
 	 *
@@ -187,10 +187,10 @@ public class DeltaIteration<ST, WT> {
 		this.parallelism = parallelism;
 		return this;
 	}
-	
+
 	/**
 	 * Gets the iteration's parallelism.
-	 * 
+	 *
 	 * @return The iteration's parallelism, or {@link ExecutionConfig#PARALLELISM_DEFAULT} if not set.
 	 */
 	public int getParallelism() {
@@ -266,13 +266,13 @@ public class DeltaIteration<ST, WT> {
 	 * iteration, such as number of elements processed. The aggregators compute global aggregates: After each iteration step,
 	 * the values are globally aggregated to produce one aggregate that represents statistics across all parallel instances.
 	 * The value of an aggregator can be accessed in the next iteration.
-	 * <p>
-	 * Aggregators can be accessed inside a function via the
+	 *
+	 * <p>Aggregators can be accessed inside a function via the
 	 * {@link org.apache.flink.api.common.functions.AbstractRichFunction#getIterationRuntimeContext()} method.
-	 * 
+	 *
 	 * @param name The name under which the aggregator is registered.
 	 * @param aggregator The aggregator class.
-	 * 
+	 *
 	 * @return The DeltaIteration itself, to allow chaining function calls.
 	 */
 	@PublicEvolving
@@ -297,62 +297,61 @@ public class DeltaIteration<ST, WT> {
 	 */
 	@PublicEvolving
 	public <X extends Value> DeltaIteration<ST, WT> registerAggregationConvergenceCriterion(
-			String name, Aggregator<X> aggregator, ConvergenceCriterion<X> convergenceCheck)
-	{
+			String name, Aggregator<X> aggregator, ConvergenceCriterion<X> convergenceCheck) {
 		this.aggregators.registerAggregationConvergenceCriterion(name, aggregator, convergenceCheck);
 		return this;
 	}
-	
+
 	/**
 	 * Gets the registry for aggregators for the iteration.
-	 * 
+	 *
 	 * @return The registry with all aggregators.
 	 */
 	@PublicEvolving
 	public AggregatorRegistry getAggregators() {
 		return this.aggregators;
 	}
-	
+
 	/**
 	 * Sets whether to keep the solution set in managed memory (safe against heap exhaustion) or unmanaged memory
 	 * (objects on heap).
-	 * 
+	 *
 	 * @param solutionSetUnManaged True to keep the solution set in unmanaged memory, false to keep it in managed memory.
-	 * 
+	 *
 	 * @see #isSolutionSetUnManaged()
 	 */
 	public void setSolutionSetUnManaged(boolean solutionSetUnManaged) {
 		this.solutionSetUnManaged = solutionSetUnManaged;
 	}
-	
+
 	/**
 	 * gets whether the solution set is in managed or unmanaged memory.
-	 * 
+	 *
 	 * @return True, if the solution set is in unmanaged memory (object heap), false if in managed memory.
-	 * 
+	 *
 	 * @see #setSolutionSetUnManaged(boolean)
 	 */
 	public boolean isSolutionSetUnManaged() {
 		return solutionSetUnManaged;
 	}
-	
+
 	// --------------------------------------------------------------------------------------------
-	
+
 	/**
 	 * A {@link DataSet} that acts as a placeholder for the solution set during the iteration.
-	 * 
+	 *
 	 * @param <ST> The type of the elements in the solution set.
 	 */
 	@Public
 	public static class SolutionSetPlaceHolder<ST> extends DataSet<ST>{
-		
+
 		private final DeltaIteration<ST, ?> deltaIteration;
-		
+
 		private SolutionSetPlaceHolder(ExecutionEnvironment context, TypeInformation<ST> type, DeltaIteration<ST, ?> deltaIteration) {
 			super(context, type);
 			this.deltaIteration = deltaIteration;
 		}
-		
+
 		public void checkJoinKeyFields(int[] keyFields) {
 			int[] ssKeys = deltaIteration.keys.computeLogicalKeyPositions();
 			if (!Arrays.equals(ssKeys, keyFields)) {

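For orientation on the DeltaIteration API reformatted above: iterateDelta opens the loop, getWorkset supplies the feedback placeholder, and closeWith merges the delta into the solution set and feeds the new workset back. A schematic sketch (statements assumed inside a main method; the halving step is a placeholder for real per-superstep logic):

	import org.apache.flink.api.common.functions.MapFunction;
	import org.apache.flink.api.java.DataSet;
	import org.apache.flink.api.java.ExecutionEnvironment;
	import org.apache.flink.api.java.operators.DeltaIteration;
	import org.apache.flink.api.java.tuple.Tuple2;

	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<Tuple2<Long, Double>> initial = env.fromElements(
		Tuple2.of(1L, 8.0), Tuple2.of(2L, 4.0));

	// Open a delta iteration keyed on field 0, capped at 10 supersteps.
	DeltaIteration<Tuple2<Long, Double>, Tuple2<Long, Double>> iteration =
		initial.iterateDelta(initial, 10, 0);

	// Placeholder step function: halve every workset value.
	DataSet<Tuple2<Long, Double>> delta = iteration.getWorkset()
		.map(new MapFunction<Tuple2<Long, Double>, Tuple2<Long, Double>>() {
			@Override
			public Tuple2<Long, Double> map(Tuple2<Long, Double> v) {
				return Tuple2.of(v.f0, v.f1 / 2);
			}
		});

	// The delta is merged into the solution set by key; the same set is fed
	// back as the next workset, so the loop runs until the superstep cap.
	DataSet<Tuple2<Long, Double>> result = iteration.closeWith(delta, delta);
	result.print();
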
http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/DeltaIterationResultSet.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/DeltaIterationResultSet.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/DeltaIterationResultSet.java
index e593488..de93dbb 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/DeltaIterationResultSet.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/DeltaIterationResultSet.java
@@ -17,6 +17,7 @@
  */
 
 package org.apache.flink.api.java.operators;
+
 import org.apache.flink.annotation.Internal;
 import org.apache.flink.annotation.Public;
 import org.apache.flink.api.common.operators.Keys;
@@ -24,19 +25,24 @@ import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.ExecutionEnvironment;
 
+/**
+ * Resulting {@link DataSet} of a delta iteration operation.
+ * @param <ST> The type of the elements in the solution set.
+ * @param <WT> The type of the elements in the workset.
+ */
 @Public
 public class DeltaIterationResultSet<ST, WT> extends DataSet<ST> {
 
 	private DeltaIteration<ST, WT> iterationHead;
 
 	private DataSet<ST> nextSolutionSet;
-	
+
 	private DataSet<WT> nextWorkset;
-	
+
 	private Keys<ST> keys;
-	
+
 	private int maxIterations;
-	
+
 	private TypeInformation<WT> typeWS;
 
 	DeltaIterationResultSet(ExecutionEnvironment context,
@@ -46,8 +52,7 @@ public class DeltaIterationResultSet<ST, WT> extends DataSet<ST> {
 							DataSet<ST> nextSolutionSet,
 							DataSet<WT> nextWorkset,
 							Keys<ST> keys,
-							int maxIterations)
-	{
+							int maxIterations) {
 		super(context, typeSS);
 		this.iterationHead = iterationHead;
 		this.nextWorkset = nextWorkset;
@@ -60,7 +65,7 @@ public class DeltaIterationResultSet<ST, WT> extends DataSet<ST> {
 	public DeltaIteration<ST, WT> getIterationHead() {
 		return iterationHead;
 	}
-	
+
 	public DataSet<ST> getNextSolutionSet() {
 		return nextSolutionSet;
 	}
@@ -78,7 +83,7 @@ public class DeltaIterationResultSet<ST, WT> extends DataSet<ST> {
 	public int getMaxIterations() {
 		return maxIterations;
 	}
-	
+
 	public TypeInformation<WT> getWorksetType() {
 		return typeWS;
 	}

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/DistinctOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/DistinctOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/DistinctOperator.java
index 267513d..9dc2a9c 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/DistinctOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/DistinctOperator.java
@@ -111,7 +111,7 @@ public class DistinctOperator<T> extends SingleInputOperator<T, T, DistinctOpera
 	/**
 	 * Sets the strategy to use for the combine phase of the reduce.
 	 *
-	 * If this method is not called, then the default hint will be used.
+	 * <p>If this method is not called, then the default hint will be used.
 	 * ({@link org.apache.flink.api.common.operators.base.ReduceOperatorBase.CombineHint#OPTIMIZER_CHOOSES})
 	 *
 	 * @param strategy The hint to use.
@@ -132,8 +132,7 @@ public class DistinctOperator<T> extends SingleInputOperator<T, T, DistinctOpera
 			String name,
 			Operator<IN> input,
 			int parallelism,
-			CombineHint hint)
-	{
+			CombineHint hint) {
 		@SuppressWarnings("unchecked")
 		final SelectorFunctionKeys<IN, K> keys = (SelectorFunctionKeys<IN, K>) rawKeys;
 
@@ -150,7 +149,7 @@ public class DistinctOperator<T> extends SingleInputOperator<T, T, DistinctOpera
 	}
 
 	@Internal
-	public static final class DistinctFunction<T> implements ReduceFunction<T> {
+	private static final class DistinctFunction<T> implements ReduceFunction<T> {
 
 		private static final long serialVersionUID = 1L;
 

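The setCombineHint javadoc fixed above notes that CombineHint.OPTIMIZER_CHOOSES is the default. Overriding it is a one-liner; a sketch assuming a tuple data set named input:

	import org.apache.flink.api.common.operators.base.ReduceOperatorBase.CombineHint;

	// Request a hash-based combine for the distinct's combine phase instead
	// of leaving the strategy to the optimizer.
	input.distinct(0).setCombineHint(CombineHint.HASH);
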
http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/FilterOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/FilterOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/FilterOperator.java
index ae8b5ea..901274e 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/FilterOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/FilterOperator.java
@@ -22,45 +22,45 @@ import org.apache.flink.annotation.Public;
 import org.apache.flink.api.common.functions.FilterFunction;
 import org.apache.flink.api.common.functions.FlatMapFunction;
 import org.apache.flink.api.common.operators.Operator;
-import org.apache.flink.api.java.operators.translation.PlanFilterOperator;
 import org.apache.flink.api.java.DataSet;
+import org.apache.flink.api.java.operators.translation.PlanFilterOperator;
 
 /**
  * This operator represents the application of a "filter" function on a data set, and the
  * result data set produced by the function.
- * 
+ *
  * @param <T> The type of the data set filtered by the operator.
  */
 @Public
 public class FilterOperator<T> extends SingleInputUdfOperator<T, T, FilterOperator<T>> {
-	
+
 	protected final FilterFunction<T> function;
-	
+
 	protected final String defaultName;
 
 	public FilterOperator(DataSet<T> input, FilterFunction<T> function, String defaultName) {
 		super(input, input.getType());
-		
+
 		this.function = function;
 		this.defaultName = defaultName;
 
 		UdfOperatorUtils.analyzeSingleInputUdf(this, FilterFunction.class, defaultName, function, null);
 	}
-	
+
 	@Override
 	protected FilterFunction<T> getFunction() {
 		return function;
 	}
-	
+
 	@Override
-	protected org.apache.flink.api.common.operators.base.FilterOperatorBase<T, FlatMapFunction<T,T>> translateToDataFlow(Operator<T> input) {
-		
-		String name = getName() != null ? getName() : "Filter at "+defaultName;
-		
+	protected org.apache.flink.api.common.operators.base.FilterOperatorBase<T, FlatMapFunction<T, T>> translateToDataFlow(Operator<T> input) {
+
+		String name = getName() != null ? getName() : "Filter at " + defaultName;
+
 		// create operator
 		PlanFilterOperator<T> po = new PlanFilterOperator<T>(function, name, getInputType());
 		po.setInput(input);
-		
+
 		// set parallelism
 		if (getParallelism() > 0) {
 			// use specified parallelism
@@ -69,7 +69,7 @@ public class FilterOperator<T> extends SingleInputUdfOperator<T, T, FilterOperat
 			// if no parallelism has been specified, use parallelism of input operator to enable chaining
 			po.setParallelism(input.getParallelism());
 		}
-		
+
 		return po;
 	}
 }

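The translateToDataFlow hunk above preserves the rule that a filter with no explicit parallelism inherits its input's parallelism, which keeps it chainable. Opting out looks like this (sketch, assuming a DataSet<String> named input):

	import org.apache.flink.api.common.functions.FilterFunction;
	import org.apache.flink.api.java.DataSet;

	// Without setParallelism, the filter adopts input's parallelism and can
	// be chained to the upstream operator; setting it pins the value.
	DataSet<String> filtered = input
		.filter(new FilterFunction<String>() {
			@Override
			public boolean filter(String value) {
				return !value.isEmpty();
			}
		})
		.setParallelism(4);
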
http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/FlatMapOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/FlatMapOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/FlatMapOperator.java
index ed730ae..56d7cb8 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/FlatMapOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/FlatMapOperator.java
@@ -29,48 +29,48 @@ import org.apache.flink.api.java.DataSet;
 /**
  * This operator represents the application of a "flatMap" function on a data set, and the
  * result data set produced by the function.
- * 
+ *
  * @param <IN> The type of the data set consumed by the operator.
  * @param <OUT> The type of the data set created by the operator.
  */
 @Public
 public class FlatMapOperator<IN, OUT> extends SingleInputUdfOperator<IN, OUT, FlatMapOperator<IN, OUT>> {
-	
+
 	protected final FlatMapFunction<IN, OUT> function;
-	
+
 	protected final String defaultName;
-	
+
 	public FlatMapOperator(DataSet<IN> input, TypeInformation<OUT> resultType, FlatMapFunction<IN, OUT> function, String defaultName) {
 		super(input, resultType);
-		
+
 		this.function = function;
 		this.defaultName = defaultName;
 
 		UdfOperatorUtils.analyzeSingleInputUdf(this, FlatMapFunction.class, defaultName, function, null);
 	}
-	
+
 	@Override
 	protected FlatMapFunction<IN, OUT> getFunction() {
 		return function;
 	}
 
 	@Override
-	protected FlatMapOperatorBase<IN, OUT, FlatMapFunction<IN,OUT>> translateToDataFlow(Operator<IN> input) {
-		String name = getName() != null ? getName() : "FlatMap at "+defaultName;
+	protected FlatMapOperatorBase<IN, OUT, FlatMapFunction<IN, OUT>> translateToDataFlow(Operator<IN> input) {
+		String name = getName() != null ? getName() : "FlatMap at " + defaultName;
 		// create operator
 		FlatMapOperatorBase<IN, OUT, FlatMapFunction<IN, OUT>> po = new FlatMapOperatorBase<IN, OUT, FlatMapFunction<IN, OUT>>(function,
-				new UnaryOperatorInformation<IN, OUT>(getInputType(), getResultType()), name);
+			new UnaryOperatorInformation<IN, OUT>(getInputType(), getResultType()), name);
 		// set input
 		po.setInput(input);
 		// set parallelism
-		if(this.getParallelism() > 0) {
+		if (this.getParallelism() > 0) {
 			// use specified parallelism
 			po.setParallelism(this.getParallelism());
 		} else {
 			// if no parallelism has been specified, use parallelism of input operator to enable chaining
 			po.setParallelism(input.getParallelism());
 		}
-		
+
 		return po;
 	}
 }

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/GroupCombineOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/GroupCombineOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/GroupCombineOperator.java
index 0c8e657..e4ed07f 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/GroupCombineOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/GroupCombineOperator.java
@@ -22,6 +22,7 @@ import org.apache.flink.annotation.Internal;
 import org.apache.flink.annotation.Public;
 import org.apache.flink.api.common.functions.GroupCombineFunction;
 import org.apache.flink.api.common.operators.Keys;
+import org.apache.flink.api.common.operators.Keys.SelectorFunctionKeys;
 import org.apache.flink.api.common.operators.Operator;
 import org.apache.flink.api.common.operators.Order;
 import org.apache.flink.api.common.operators.Ordering;
@@ -33,13 +34,12 @@ import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.functions.SemanticPropUtil;
 import org.apache.flink.api.java.operators.translation.PlanUnwrappingGroupCombineOperator;
 import org.apache.flink.api.java.operators.translation.PlanUnwrappingSortedGroupCombineOperator;
-import org.apache.flink.api.common.operators.Keys.SelectorFunctionKeys;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.api.java.tuple.Tuple3;
 
 /**
  * This operator behaves like the GroupReduceOperator with Combine but only runs the Combine part which reduces all data
- * locally in their partitions. The combine part can return an arbitrary data type. This is useful to pre-combine values 
+ * locally in their partitions. The combine part can return an arbitrary data type. This is useful to pre-combine values
  * into an intermediate representation before applying a proper reduce operation.
  *
  * @param <IN> The type of the data set consumed by the operator.
@@ -95,12 +95,12 @@ public class GroupCombineOperator<IN, OUT> extends SingleInputUdfOperator<IN, OU
 		SingleInputSemanticProperties props = super.getSemanticProperties();
 
 		// offset semantic information by extracted key fields
-		if(props != null &&
+		if (props != null &&
 				this.grouper != null &&
 				this.grouper.keys instanceof SelectorFunctionKeys) {
 
-			int offset = ((SelectorFunctionKeys<?,?>) this.grouper.keys).getKeyType().getTotalFields();
-			if(this.grouper instanceof SortedGrouping) {
+			int offset = ((SelectorFunctionKeys<?, ?>) this.grouper.keys).getKeyType().getTotalFields();
+			if (this.grouper instanceof SortedGrouping) {
 				offset += ((SortedGrouping<?>) this.grouper).getSortSelectionFunctionKey().getKeyType().getTotalFields();
 			}
 
@@ -174,7 +174,7 @@ public class GroupCombineOperator<IN, OUT> extends SingleInputUdfOperator<IN, OU
 				Order[] sortOrders = sortedGrouper.getGroupSortOrders();
 
 				Ordering o = new Ordering();
-				for(int i=0; i < sortKeyPositions.length; i++) {
+				for (int i = 0; i < sortKeyPositions.length; i++) {
 					o.appendOrdering(sortKeyPositions[i], null, sortOrders[i]);
 				}
 				po.setGroupOrder(o);
@@ -187,7 +187,6 @@ public class GroupCombineOperator<IN, OUT> extends SingleInputUdfOperator<IN, OU
 		}
 	}
 
-
 	// --------------------------------------------------------------------------------------------
 
 	@SuppressWarnings("unchecked")
@@ -196,8 +195,7 @@ public class GroupCombineOperator<IN, OUT> extends SingleInputUdfOperator<IN, OU
 			GroupCombineFunction<IN, OUT> function,
 			TypeInformation<OUT> outputType,
 			String name,
-			Operator<IN> input)
-	{
+			Operator<IN> input) {
 		final SelectorFunctionKeys<IN, K> keys = (SelectorFunctionKeys<IN, K>) rawKeys;
 
 		TypeInformation<Tuple2<K, IN>> typeInfoWithKey = KeyFunctions.createTypeWithKey(keys);
@@ -218,10 +216,9 @@ public class GroupCombineOperator<IN, OUT> extends SingleInputUdfOperator<IN, OU
 			GroupCombineFunction<IN, OUT> function,
 			TypeInformation<OUT> outputType,
 			String name,
-			Operator<IN> input)
-	{
+			Operator<IN> input) {
 		final SelectorFunctionKeys<IN, K1> groupingKey = (SelectorFunctionKeys<IN, K1>) rawGroupingKey;
-		final SelectorFunctionKeys<IN, K2> sortingKey = (SelectorFunctionKeys<IN, K2>)rawSortingKeys;
+		final SelectorFunctionKeys<IN, K2> sortingKey = (SelectorFunctionKeys<IN, K2>) rawSortingKeys;
 		TypeInformation<Tuple3<K1, K2, IN>> typeInfoWithKey = KeyFunctions.createTypeWithKey(groupingKey, sortingKey);
 
 		Operator<Tuple3<K1, K2, IN>> inputWithKey = KeyFunctions.appendKeyExtractor(input, groupingKey, sortingKey);

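As the GroupCombineOperator javadoc above explains, the combine runs locally per partition and may emit an intermediate type before any shuffle. A sketch of a partition-local pre-sum, assuming a DataSet<Tuple2<String, Integer>> named input; a final groupBy(0).sum(1) would complete the aggregation:

	import org.apache.flink.api.common.functions.GroupCombineFunction;
	import org.apache.flink.api.java.DataSet;
	import org.apache.flink.api.java.tuple.Tuple2;
	import org.apache.flink.util.Collector;

	DataSet<Tuple2<String, Integer>> preCombined = input
		.groupBy(0)
		.combineGroup(new GroupCombineFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>() {
			@Override
			public void combine(Iterable<Tuple2<String, Integer>> values,
					Collector<Tuple2<String, Integer>> out) {
				String key = null;
				int sum = 0;
				// Partial, partition-local sum; no data has been shuffled yet.
				for (Tuple2<String, Integer> v : values) {
					key = v.f0;
					sum += v.f1;
				}
				out.collect(Tuple2.of(key, sum));
			}
		});
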
http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/GroupReduceOperator.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/GroupReduceOperator.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/GroupReduceOperator.java
index b339baf..069ac44 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/GroupReduceOperator.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/GroupReduceOperator.java
@@ -24,7 +24,8 @@ import org.apache.flink.api.common.functions.CombineFunction;
 import org.apache.flink.api.common.functions.GroupCombineFunction;
 import org.apache.flink.api.common.functions.GroupReduceFunction;
 import org.apache.flink.api.common.functions.RichGroupReduceFunction;
-import org.apache.flink.api.java.operators.translation.CombineToGroupCombineWrapper;
+import org.apache.flink.api.common.operators.Keys.ExpressionKeys;
+import org.apache.flink.api.common.operators.Keys.SelectorFunctionKeys;
 import org.apache.flink.api.common.operators.Operator;
 import org.apache.flink.api.common.operators.Order;
 import org.apache.flink.api.common.operators.Ordering;
@@ -32,15 +33,15 @@ import org.apache.flink.api.common.operators.SingleInputSemanticProperties;
 import org.apache.flink.api.common.operators.UnaryOperatorInformation;
 import org.apache.flink.api.common.operators.base.GroupReduceOperatorBase;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.functions.SemanticPropUtil;
-import org.apache.flink.api.common.operators.Keys.SelectorFunctionKeys;
-import org.apache.flink.api.common.operators.Keys.ExpressionKeys;
+import org.apache.flink.api.java.operators.translation.CombineToGroupCombineWrapper;
 import org.apache.flink.api.java.operators.translation.PlanUnwrappingReduceGroupOperator;
 import org.apache.flink.api.java.operators.translation.PlanUnwrappingSortedReduceGroupOperator;
 import org.apache.flink.api.java.operators.translation.RichCombineToGroupCombineWrapper;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.api.java.tuple.Tuple3;
-import org.apache.flink.api.java.DataSet;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -50,7 +51,7 @@ import java.lang.reflect.Type;
 /**
  * This operator represents the application of a "reduceGroup" function on a data set, and the
  * result data set produced by the function.
- * 
+ *
  * @param <IN> The type of the data set consumed by the operator.
  * @param <OUT> The type of the data set created by the operator.
  */
@@ -62,14 +63,14 @@ public class GroupReduceOperator<IN, OUT> extends SingleInputUdfOperator<IN, OUT
 	private GroupReduceFunction<IN, OUT> function;
 
 	private final Grouping<IN> grouper;
-	
+
 	private final String defaultName;
 
 	private boolean combinable;
 
 	/**
 	 * Constructor for a non-grouped reduce (all reduce).
-	 * 
+	 *
 	 * @param input The input data set to the groupReduce function.
 	 * @param function The user-defined GroupReduce function.
 	 */
@@ -82,10 +83,10 @@ public class GroupReduceOperator<IN, OUT> extends SingleInputUdfOperator<IN, OUT
 
 		this.combinable = checkCombinability();
 	}
-	
+
 	/**
 	 * Constructor for a grouped reduce.
-	 * 
+	 *
 	 * @param input The grouped input to be processed group-wise by the groupReduce function.
 	 * @param function The user-defined GroupReduce function.
 	 */
@@ -149,14 +150,12 @@ public class GroupReduceOperator<IN, OUT> extends SingleInputUdfOperator<IN, OUT
 		}
 		return false;
 	}
-	
-	
+
 	@Override
 	protected GroupReduceFunction<IN, OUT> getFunction() {
 		return function;
 	}
 
-	
 	// --------------------------------------------------------------------------------------------
 	//  Properties
 	// --------------------------------------------------------------------------------------------
@@ -164,10 +163,10 @@ public class GroupReduceOperator<IN, OUT> extends SingleInputUdfOperator<IN, OUT
 	public boolean isCombinable() {
 		return combinable;
 	}
-	
+
 	public GroupReduceOperator<IN, OUT> setCombinable(boolean combinable) {
 
-		if(combinable) {
+		if (combinable) {
 			// sanity check that the function is a subclass of the combine interface
 			if (!checkCombinability()) {
 				throw new IllegalArgumentException("Either the function does not implement a combine interface, " +
@@ -188,12 +187,12 @@ public class GroupReduceOperator<IN, OUT> extends SingleInputUdfOperator<IN, OUT
 		SingleInputSemanticProperties props = super.getSemanticProperties();
 
 		// offset semantic information by extracted key fields
-		if(props != null &&
+		if (props != null &&
 				this.grouper != null &&
 				this.grouper.keys instanceof SelectorFunctionKeys) {
 
-			int offset = ((SelectorFunctionKeys<?,?>) this.grouper.keys).getKeyType().getTotalFields();
-			if(this.grouper instanceof SortedGrouping) {
+			int offset = ((SelectorFunctionKeys<?, ?>) this.grouper.keys).getKeyType().getTotalFields();
+			if (this.grouper instanceof SortedGrouping) {
 				offset += ((SortedGrouping<?>) this.grouper).getSortSelectionFunctionKey().getKeyType().getTotalFields();
 			}
 			props = SemanticPropUtil.addSourceFieldOffset(props, this.getInputType().getTotalFields(), offset);
@@ -205,7 +204,7 @@ public class GroupReduceOperator<IN, OUT> extends SingleInputUdfOperator<IN, OUT
 	// --------------------------------------------------------------------------------------------
 	//  Translation
 	// --------------------------------------------------------------------------------------------
-	
+
 	@Override
 	@SuppressWarnings("unchecked")
 	protected GroupReduceOperatorBase<?, OUT, ?> translateToDataFlow(Operator<IN> input) {
@@ -225,16 +224,16 @@ public class GroupReduceOperator<IN, OUT> extends SingleInputUdfOperator<IN, OUT
 			UnaryOperatorInformation<IN, OUT> operatorInfo = new UnaryOperatorInformation<>(getInputType(), getResultType());
 			GroupReduceOperatorBase<IN, OUT, GroupReduceFunction<IN, OUT>> po =
 					new GroupReduceOperatorBase<>(function, operatorInfo, new int[0], name);
-			
+
 			po.setCombinable(combinable);
 			po.setInput(input);
 			// the parallelism for a non grouped reduce can only be 1
 			po.setParallelism(1);
 			return po;
 		}
-	
+
 		if (grouper.getKeys() instanceof SelectorFunctionKeys) {
-		
+
 			@SuppressWarnings("unchecked")
 			SelectorFunctionKeys<IN, ?> selectorKeys = (SelectorFunctionKeys<IN, ?>) grouper.getKeys();
 
@@ -271,29 +270,28 @@ public class GroupReduceOperator<IN, OUT> extends SingleInputUdfOperator<IN, OUT
 			po.setInput(input);
 			po.setParallelism(getParallelism());
 			po.setCustomPartitioner(grouper.getCustomPartitioner());
-			
+
 			// set group order
 			if (grouper instanceof SortedGrouping) {
 				SortedGrouping<IN> sortedGrouper = (SortedGrouping<IN>) grouper;
 
 				int[] sortKeyPositions = sortedGrouper.getGroupSortKeyPositions();
 				Order[] sortOrders = sortedGrouper.getGroupSortOrders();
-				
+
 				Ordering o = new Ordering();
-				for(int i=0; i < sortKeyPositions.length; i++) {
+				for (int i = 0; i < sortKeyPositions.length; i++) {
 					o.appendOrdering(sortKeyPositions[i], null, sortOrders[i]);
 				}
 				po.setGroupOrder(o);
 			}
-			
+
 			return po;
 		}
 		else {
 			throw new UnsupportedOperationException("Unrecognized key type.");
 		}
 	}
-	
-	
+
 	// --------------------------------------------------------------------------------------------
 
 	@SuppressWarnings("unchecked")
@@ -303,8 +301,7 @@ public class GroupReduceOperator<IN, OUT> extends SingleInputUdfOperator<IN, OUT
 			TypeInformation<OUT> outputType,
 			String name,
 			Operator<IN> input,
-			boolean combinable)
-	{
+			boolean combinable) {
 		SelectorFunctionKeys<IN, K> keys = (SelectorFunctionKeys<IN, K>) rawKeys;
 		TypeInformation<Tuple2<K, IN>> typeInfoWithKey = KeyFunctions.createTypeWithKey(keys);
 
@@ -326,11 +323,10 @@ public class GroupReduceOperator<IN, OUT> extends SingleInputUdfOperator<IN, OUT
 		TypeInformation<OUT> outputType,
 		String name,
 		Operator<IN> input,
-		boolean combinable)
-	{
+		boolean combinable) {
 		final SelectorFunctionKeys<IN, K1> groupingKey = (SelectorFunctionKeys<IN, K1>) rawGroupingKey;
 		final SelectorFunctionKeys<IN, K2> sortingKey = (SelectorFunctionKeys<IN, K2>) rawSortingKey;
-		TypeInformation<Tuple3<K1, K2, IN>> typeInfoWithKey = KeyFunctions.createTypeWithKey(groupingKey,sortingKey);
+		TypeInformation<Tuple3<K1, K2, IN>> typeInfoWithKey = KeyFunctions.createTypeWithKey(groupingKey, sortingKey);
 
 		Operator<Tuple3<K1, K2, IN>> inputWithKey = KeyFunctions.appendKeyExtractor(input, groupingKey, sortingKey);
 

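GroupReduceOperator marks itself combinable (see checkCombinability in the hunks above) when the UDF also implements a combine interface. A sketch of such a function, where reduce and combine share one summing routine and the combine output type equals the input type:

	import org.apache.flink.api.common.functions.GroupCombineFunction;
	import org.apache.flink.api.common.functions.GroupReduceFunction;
	import org.apache.flink.api.java.tuple.Tuple2;
	import org.apache.flink.util.Collector;

	public static class CombinableSum implements
			GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>,
			GroupCombineFunction<Tuple2<String, Integer>, Tuple2<String, Integer>> {

		@Override
		public void reduce(Iterable<Tuple2<String, Integer>> values,
				Collector<Tuple2<String, Integer>> out) {
			combine(values, out);
		}

		@Override
		public void combine(Iterable<Tuple2<String, Integer>> values,
				Collector<Tuple2<String, Integer>> out) {
			String key = null;
			int sum = 0;
			for (Tuple2<String, Integer> v : values) {
				key = v.f0;
				sum += v.f1;
			}
			out.collect(Tuple2.of(key, sum));
		}
	}

	// Usage: input.groupBy(0).reduceGroup(new CombinableSum()) is detected as
	// combinable automatically; no setCombinable(true) call is needed.
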
http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/Grouping.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/Grouping.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/Grouping.java
index dbaaa9d..74bd9e5 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/Grouping.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/Grouping.java
@@ -26,8 +26,9 @@ import org.apache.flink.api.common.operators.Keys;
 import org.apache.flink.api.java.DataSet;
 
 /**
- * Grouping is an intermediate step for a transformation on a grouped DataSet.<br>
- * The following transformation can be applied on Grouping:
+ * Grouping is an intermediate step for a transformation on a grouped DataSet.
+ *
+ * <p>The following transformations can be applied on a Grouping:
  * <ul>
  * <li>{@link UnsortedGrouping#reduce(org.apache.flink.api.common.functions.ReduceFunction)},</li>
  * <li>{@link UnsortedGrouping#reduceGroup(org.apache.flink.api.common.functions.GroupReduceFunction)}, and</li>
@@ -35,24 +36,23 @@ import org.apache.flink.api.java.DataSet;
  * </ul>
  *
  * @param <T> The type of the elements of the grouped DataSet.
- * 
+ *
  * @see DataSet
  */
 @Public
 public abstract class Grouping<T> {
-	
+
 	protected final DataSet<T> inputDataSet;
-	
+
 	protected final Keys<T> keys;
-	
+
 	protected Partitioner<?> customPartitioner;
 
-	
 	public Grouping(DataSet<T> set, Keys<T> keys) {
 		if (set == null || keys == null) {
 			throw new NullPointerException();
 		}
-		
+
 		if (keys.isEmpty()) {
 			throw new InvalidProgramException("The grouping keys must not be empty.");
 		}
@@ -60,17 +60,17 @@ public abstract class Grouping<T> {
 		this.inputDataSet = set;
 		this.keys = keys;
 	}
-	
+
 	/**
 	 * Returns the input DataSet of a grouping operation, that is the one before the grouping. This means that
 	 * if it is applied directly to the result of a grouping operation, it will cancel its effect. As an example, in the
 	 * following snippet:
-	 * <pre><code>
+	 * <pre>{@code
 	 * DataSet<X> notGrouped = input.groupBy().getDataSet();
 	 * DataSet<Y> allReduced = notGrouped.reduce()
-	 * </pre></code>
-	 * the <code>groupBy()</code> is as if it never happened, as the <code>notGrouped</code> DataSet corresponds
-	 * to the input of the <code>groupBy()</code> (because of the <code>getDataset()</code>).
+	 * }</pre>
+	 * the {@code groupBy()} is as if it never happened, as the {@code notGrouped} DataSet corresponds
+	 * to the input of the {@code groupBy()} (because of the {@code getDataset()}).
 	 * */
 	@Internal
 	public DataSet<T> getInputDataSet() {
@@ -81,11 +81,11 @@ public abstract class Grouping<T> {
 	public Keys<T> getKeys() {
 		return this.keys;
 	}
-	
+
 	/**
 	 * Gets the custom partitioner to be used for this grouping, or {@code null}, if
 	 * none was defined.
-	 * 
+	 *
 	 * @return The custom partitioner to be used for this grouping.
 	 */
 	@Internal

http://git-wip-us.apache.org/repos/asf/flink/blob/34e82f9d/flink-java/src/main/java/org/apache/flink/api/java/operators/IterativeDataSet.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/IterativeDataSet.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/IterativeDataSet.java
index c7ff6ab..d522f02 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/IterativeDataSet.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/IterativeDataSet.java
@@ -31,9 +31,9 @@ import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.types.Value;
 
 /**
- * The IterativeDataSet represents the start of an iteration. It is created from the DataSet that 
+ * The IterativeDataSet represents the start of an iteration. It is created from the DataSet that
  * represents the initial solution set via the {@link DataSet#iterate(int)} method.
- * 
+ *
  * @param <T> The data type of set that is the input and feedback of the iteration.
  *
  * @see DataSet#iterate(int)
@@ -42,33 +42,33 @@ import org.apache.flink.types.Value;
 public class IterativeDataSet<T> extends SingleInputOperator<T, T, IterativeDataSet<T>> {
 
 	private final AggregatorRegistry aggregators = new AggregatorRegistry();
-	
+
 	private int maxIterations;
 
 	public IterativeDataSet(ExecutionEnvironment context, TypeInformation<T> type, DataSet<T> input, int maxIterations) {
 		super(input, type);
 		this.maxIterations = maxIterations;
 	}
-	
+
 	// --------------------------------------------------------------------------------------------
-	
+
 	/**
 	 * Closes the iteration. This method defines the end of the iterative program part.
-	 * 
+	 *
 	 * @param iterationResult The data set that will be fed back to the next iteration.
 	 * @return The DataSet that represents the result of the iteration, after the computation has terminated.
-	 * 
+	 *
 	 * @see DataSet#iterate(int)
 	 */
 	public DataSet<T> closeWith(DataSet<T> iterationResult) {
 		return new BulkIterationResultSet<T>(getExecutionEnvironment(), getType(), this, iterationResult);
 	}
-	
+
 	/**
 	 * Closes the iteration and specifies a termination criterion. This method defines the end of
 	 * the iterative program part.
-	 * <p>
-	 * The termination criterion is a means of dynamically signaling the iteration to halt. It is expressed via a data
+	 *
+	 * <p>The termination criterion is a means of dynamically signaling the iteration to halt. It is expressed via a data
 	 * set that will trigger to halt the loop as soon as the data set is empty. A typical way of using the termination
 	 * criterion is to have a filter that filters out all elements that are considered non-converged. As soon as no more
 	 * such elements exist, the iteration finishes.
@@ -76,7 +76,7 @@ public class IterativeDataSet<T> extends SingleInputOperator<T, T, IterativeData
 	 * @param iterationResult The data set that will be fed back to the next iteration.
 	 * @param terminationCriterion The data set that being used to trigger halt on operation once it is empty.
 	 * @return The DataSet that represents the result of the iteration, after the computation has terminated.
-	 * 
+	 *
 	 * @see DataSet#iterate(int)
 	 */
 	public DataSet<T> closeWith(DataSet<T> iterationResult, DataSet<?> terminationCriterion) {
@@ -85,25 +85,25 @@ public class IterativeDataSet<T> extends SingleInputOperator<T, T, IterativeData
 
 	/**
 	 * Gets the maximum number of iterations.
-	 * 
+	 *
 	 * @return The maximum number of iterations.
 	 */
 	public int getMaxIterations() {
 		return maxIterations;
 	}
-	
+
 	/**
 	 * Registers an {@link Aggregator} for the iteration. Aggregators can be used to maintain simple statistics during the
 	 * iteration, such as number of elements processed. The aggregators compute global aggregates: After each iteration step,
 	 * the values are globally aggregated to produce one aggregate that represents statistics across all parallel instances.
 	 * The value of an aggregator can be accessed in the next iteration.
-	 * <p>
-	 * Aggregators can be accessed inside a function via the
+	 *
+	 * <p>Aggregators can be accessed inside a function via the
 	 * {@link org.apache.flink.api.common.functions.AbstractRichFunction#getIterationRuntimeContext()} method.
-	 * 
+	 *
 	 * @param name The name under which the aggregator is registered.
 	 * @param aggregator The aggregator class.
-	 * 
+	 *
 	 * @return The IterativeDataSet itself, to allow chaining function calls.
 	 */
 	@PublicEvolving
@@ -111,7 +111,7 @@ public class IterativeDataSet<T> extends SingleInputOperator<T, T, IterativeData
 		this.aggregators.registerAggregator(name, aggregator);
 		return this;
 	}
-	
+
 	/**
 	 * Registers an {@link Aggregator} for the iteration together with a {@link ConvergenceCriterion}. For a general description
 	 * of aggregators, see {@link #registerAggregator(String, Aggregator)} and {@link Aggregator}.
@@ -119,33 +119,32 @@ public class IterativeDataSet<T> extends SingleInputOperator<T, T, IterativeData
 	 * the iteration should terminate. A typical use case is to have an aggregator that sums up the total error of change
 	 * in an iteration step and have to have a convergence criterion that signals termination as soon as the aggregate value
 	 * is below a certain threshold.
-	 * 
+	 *
 	 * @param name The name under which the aggregator is registered.
 	 * @param aggregator The aggregator class.
 	 * @param convergenceCheck The convergence criterion.
-	 * 
+	 *
 	 * @return The IterativeDataSet itself, to allow chaining function calls.
 	 */
 	@PublicEvolving
 	public <X extends Value> IterativeDataSet<T> registerAggregationConvergenceCriterion(
-			String name, Aggregator<X> aggregator, ConvergenceCriterion<X> convergenceCheck)
-	{
+			String name, Aggregator<X> aggregator, ConvergenceCriterion<X> convergenceCheck) {
 		this.aggregators.registerAggregationConvergenceCriterion(name, aggregator, convergenceCheck);
 		return this;
 	}
-	
+
 	/**
-	 * Gets the registry for aggregators. On the registry, one can add {@link Aggregator}s and an aggregator-based 
+	 * Gets the registry for aggregators. On the registry, one can add {@link Aggregator}s and an aggregator-based
 	 * {@link ConvergenceCriterion}. This method offers an alternative way to registering the aggregators via
-	 * {@link #registerAggregator(String, Aggregator)} and {@link #registerAggregationConvergenceCriterion(String, Aggregator, ConvergenceCriterion))}.
-	 * 
+	 * {@link #registerAggregator(String, Aggregator)} and {@link #registerAggregationConvergenceCriterion(String, Aggregator, ConvergenceCriterion)}.
+	 *
 	 * @return The registry for aggregators.
 	 */
 	@PublicEvolving
 	public AggregatorRegistry getAggregators() {
 		return aggregators;
 	}
-	
+
 	// --------------------------------------------------------------------------------------------
 
 	@Override