Posted to commits@flink.apache.org by gr...@apache.org on 2017/05/12 14:12:39 UTC

flink git commit: [hotfix] Minutiae

Repository: flink
Updated Branches:
  refs/heads/master c9ffa9d2b -> 423f4d65e


[hotfix] Minutiae


Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/423f4d65
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/423f4d65
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/423f4d65

Branch: refs/heads/master
Commit: 423f4d65e3dd35482fdbc2fb7e34ec8f11243609
Parents: c9ffa9d
Author: Greg Hogan <co...@greghogan.com>
Authored: Thu Feb 2 13:23:04 2017 -0500
Committer: Greg Hogan <co...@greghogan.com>
Committed: Fri May 12 10:11:56 2017 -0400

----------------------------------------------------------------------
 .../org/apache/flink/client/CliFrontend.java    |  2 +-
 .../api/io/avro/AvroRecordInputFormatTest.java  | 26 +++++++----
 .../connectors/fs/SequenceFileWriter.java       |  2 +-
 .../common/accumulators/AccumulatorHelper.java  |  2 +-
 .../flink/api/java/typeutils/TypeExtractor.java |  2 +-
 .../AbstractGenericArraySerializerTest.java     | 16 +++----
 .../AbstractGenericTypeComparatorTest.java      |  6 +--
 .../AbstractGenericTypeSerializerTest.java      |  2 +-
 .../typeutils/runtime/kryo/SerializersTest.java |  2 +-
 .../apache/flink/types/BasicTypeInfoTest.java   |  9 ++--
 .../flink/api/java/operators/Grouping.java      |  2 +-
 .../java/functions/SelectByFunctionsTest.java   | 18 ++++----
 .../java/functions/SemanticPropUtilTest.java    | 32 ++++++-------
 .../traversals/GraphCreatingVisitor.java        |  4 +-
 .../flink/runtime/util/SerializedThrowable.java | 10 ++---
 .../scheduler/SchedulerSlotSharingTest.java     |  2 +-
 .../api/operator/SelectByFunctionTest.scala     | 18 ++++----
 .../operators/AbstractStreamOperatorTest.java   | 47 ++++++++++----------
 .../runtime/io/BarrierTrackerTest.java          |  2 +-
 .../test/accumulators/AccumulatorITCase.java    |  2 +-
 20 files changed, 107 insertions(+), 99 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/423f4d65/flink-clients/src/main/java/org/apache/flink/client/CliFrontend.java
----------------------------------------------------------------------
diff --git a/flink-clients/src/main/java/org/apache/flink/client/CliFrontend.java b/flink-clients/src/main/java/org/apache/flink/client/CliFrontend.java
index 74d5f5d..62fa402 100644
--- a/flink-clients/src/main/java/org/apache/flink/client/CliFrontend.java
+++ b/flink-clients/src/main/java/org/apache/flink/client/CliFrontend.java
@@ -860,7 +860,7 @@ public class CliFrontend {
 			Map<String, Object> accumulatorsResult = execResult.getAllAccumulatorResults();
 			if (accumulatorsResult.size() > 0) {
 				System.out.println("Accumulator Results: ");
-				System.out.println(AccumulatorHelper.getResultsFormated(accumulatorsResult));
+				System.out.println(AccumulatorHelper.getResultsFormatted(accumulatorsResult));
 			}
 		} else {
 			logAndSysout("Job has been submitted with JobID " + result.getJobID());

http://git-wip-us.apache.org/repos/asf/flink/blob/423f4d65/flink-connectors/flink-avro/src/test/java/org/apache/flink/api/io/avro/AvroRecordInputFormatTest.java
----------------------------------------------------------------------
diff --git a/flink-connectors/flink-avro/src/test/java/org/apache/flink/api/io/avro/AvroRecordInputFormatTest.java b/flink-connectors/flink-avro/src/test/java/org/apache/flink/api/io/avro/AvroRecordInputFormatTest.java
index 91a9612..3b6ad63 100644
--- a/flink-connectors/flink-avro/src/test/java/org/apache/flink/api/io/avro/AvroRecordInputFormatTest.java
+++ b/flink-connectors/flink-avro/src/test/java/org/apache/flink/api/io/avro/AvroRecordInputFormatTest.java
@@ -18,14 +18,6 @@
 
 package org.apache.flink.api.io.avro;
 
-import static org.junit.Assert.*;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.IOException;
-import java.util.*;
-
 import org.apache.avro.Schema;
 import org.apache.avro.file.DataFileReader;
 import org.apache.avro.file.DataFileWriter;
@@ -59,6 +51,22 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
 /**
  * Test the avro input format.
  * (The testcase is mostly the getting started tutorial of avro)
@@ -273,7 +281,7 @@ public class AvroRecordInputFormatTest {
 		DatumReader<GenericData.Record> datumReader = new GenericDatumReader<>(userSchema);
 
 		try (FileReader<GenericData.Record> dataFileReader = DataFileReader.openReader(testFile, datumReader)) {
-			// initialize Record by reading it from disk (thats easier than creating it by hand)
+			// initialize Record by reading it from disk (that's easier than creating it by hand)
 			GenericData.Record rec = new GenericData.Record(userSchema);
 			dataFileReader.next(rec);
 			

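Note: the hunk above reads a GenericData.Record back with Avro's DataFileReader. For orientation only, the writing counterpart looks roughly like the sketch below; the class and method names are made up here and not part of this commit.

    import org.apache.avro.Schema;
    import org.apache.avro.file.DataFileWriter;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.io.DatumWriter;

    import java.io.File;
    import java.io.IOException;

    public class AvroWriteExample {
        static void writeOneRecord(Schema userSchema, GenericData.Record rec, File testFile) throws IOException {
            DatumWriter<GenericData.Record> datumWriter = new GenericDatumWriter<>(userSchema);
            try (DataFileWriter<GenericData.Record> dataFileWriter = new DataFileWriter<>(datumWriter)) {
                dataFileWriter.create(userSchema, testFile);  // writes the schema into the file header
                dataFileWriter.append(rec);
            }
        }
    }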
http://git-wip-us.apache.org/repos/asf/flink/blob/423f4d65/flink-connectors/flink-connector-filesystem/src/main/java/org/apache/flink/streaming/connectors/fs/SequenceFileWriter.java
----------------------------------------------------------------------
diff --git a/flink-connectors/flink-connector-filesystem/src/main/java/org/apache/flink/streaming/connectors/fs/SequenceFileWriter.java b/flink-connectors/flink-connector-filesystem/src/main/java/org/apache/flink/streaming/connectors/fs/SequenceFileWriter.java
index 08c0d0a..32cadec 100644
--- a/flink-connectors/flink-connector-filesystem/src/main/java/org/apache/flink/streaming/connectors/fs/SequenceFileWriter.java
+++ b/flink-connectors/flink-connector-filesystem/src/main/java/org/apache/flink/streaming/connectors/fs/SequenceFileWriter.java
@@ -38,7 +38,7 @@ import java.io.IOException;
 /**
  * A {@link Writer} that writes the bucket files as Hadoop {@link SequenceFile SequenceFiles}.
  * The input to the {@link BucketingSink} must
- * be a {@link org.apache.flink.api.java.tuple.Tuple2} of two Hadopo
+ * be a {@link org.apache.flink.api.java.tuple.Tuple2} of two Hadoop
  * {@link org.apache.hadoop.io.Writable Writables}.
  *
  * @param <K> The type of the first tuple field.

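Note: as the corrected Javadoc says, the input to the BucketingSink must be a Tuple2 of two Hadoop Writables. A minimal sketch of wiring that up is below; the base path, key/value types, and class name are illustrative assumptions, not part of this commit.

    import org.apache.flink.api.java.tuple.Tuple2;
    import org.apache.flink.streaming.api.datastream.DataStream;
    import org.apache.flink.streaming.connectors.fs.SequenceFileWriter;
    import org.apache.flink.streaming.connectors.fs.bucketing.BucketingSink;

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;

    public class SequenceFileSinkExample {
        // base path and Writable types are illustrative
        static void writeAsSequenceFiles(DataStream<Tuple2<IntWritable, Text>> input) {
            BucketingSink<Tuple2<IntWritable, Text>> sink =
                    new BucketingSink<>("hdfs:///tmp/flink/sequence-files");
            // the writer expects Tuple2<K extends Writable, V extends Writable>
            sink.setWriter(new SequenceFileWriter<IntWritable, Text>());
            input.addSink(sink);
        }
    }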
http://git-wip-us.apache.org/repos/asf/flink/blob/423f4d65/flink-core/src/main/java/org/apache/flink/api/common/accumulators/AccumulatorHelper.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/accumulators/AccumulatorHelper.java b/flink-core/src/main/java/org/apache/flink/api/common/accumulators/AccumulatorHelper.java
index 1a87235..3282302 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/accumulators/AccumulatorHelper.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/accumulators/AccumulatorHelper.java
@@ -112,7 +112,7 @@ public class AccumulatorHelper {
 		return resultMap;
 	}
 
-	public static String getResultsFormated(Map<String, Object> map) {
+	public static String getResultsFormatted(Map<String, Object> map) {
 		StringBuilder builder = new StringBuilder();
 		for (Map.Entry<String, Object> entry : map.entrySet()) {
 			builder

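Note: a rough sketch of calling the renamed helper; the map contents and class name below are made up for illustration.

    import org.apache.flink.api.common.accumulators.AccumulatorHelper;

    import java.util.HashMap;
    import java.util.Map;

    public class AccumulatorPrintExample {
        public static void main(String[] args) {
            Map<String, Object> results = new HashMap<>();
            results.put("num-lines", 3);   // illustrative entries, not from the commit
            results.put("words", 42L);
            // renders the accumulator result map as a printable summary
            System.out.println(AccumulatorHelper.getResultsFormatted(results));
        }
    }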
http://git-wip-us.apache.org/repos/asf/flink/blob/423f4d65/flink-core/src/main/java/org/apache/flink/api/java/typeutils/TypeExtractor.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/TypeExtractor.java b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/TypeExtractor.java
index f1bf957..112ca38 100644
--- a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/TypeExtractor.java
+++ b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/TypeExtractor.java
@@ -394,7 +394,7 @@ public class TypeExtractor {
 	 * @param inputTypeArgumentIndex Index of the type argument of function's first parameter
 	 *                               specifying the input type if it is wrapped (Iterable, Map,
 	 *                               etc.). Otherwise -1.
-	 * @param outputTypeArgumentIndex Index of the type argument of functions second parameter
+	 * @param outputTypeArgumentIndex Index of the type argument of function's second parameter
 	 *                                specifying the output type if it is wrapped in a Collector.
 	 *                                Otherwise -1.
 	 * @param inType Type of the input elements (In case of an iterable, it is the element type)

http://git-wip-us.apache.org/repos/asf/flink/blob/423f4d65/flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/AbstractGenericArraySerializerTest.java
----------------------------------------------------------------------
diff --git a/flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/AbstractGenericArraySerializerTest.java b/flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/AbstractGenericArraySerializerTest.java
index 307a3d2..7386e6e 100644
--- a/flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/AbstractGenericArraySerializerTest.java
+++ b/flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/AbstractGenericArraySerializerTest.java
@@ -18,12 +18,6 @@
 
 package org.apache.flink.api.java.typeutils.runtime;
 
-import java.lang.reflect.Array;
-import java.util.ArrayList;
-import java.util.Random;
-
-import org.junit.Assert;
-import org.junit.Test;
 import org.apache.flink.api.common.typeutils.SerializerTestInstance;
 import org.apache.flink.api.common.typeutils.TypeSerializer;
 import org.apache.flink.api.common.typeutils.base.GenericArraySerializer;
@@ -34,6 +28,12 @@ import org.apache.flink.api.java.typeutils.runtime.AbstractGenericTypeSerializer
 import org.apache.flink.api.java.typeutils.runtime.AbstractGenericTypeSerializerTest.ComplexNestedObject2;
 import org.apache.flink.api.java.typeutils.runtime.AbstractGenericTypeSerializerTest.SimpleTypes;
 import org.apache.flink.util.StringUtils;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.lang.reflect.Array;
+import java.util.ArrayList;
+import java.util.Random;
 
 public abstract class AbstractGenericArraySerializerTest {
 	
@@ -110,11 +110,11 @@ public abstract class AbstractGenericArraySerializerTest {
 	@Test
 	public void testBeanStyleObjects() {
 		{
-			Book b1 = new Book(976243875L, "The Serialization Odysse", 42);
+			Book b1 = new Book(976243875L, "The Serialization Odyssey", 42);
 			Book b2 = new Book(0L, "Debugging byte streams", 1337);
 			Book b3 = new Book(-1L, "Low level interfaces", 0xC0FFEE);
 			Book b4 = new Book(Long.MAX_VALUE, "The joy of bits and bytes", 0xDEADBEEF);
-			Book b5 = new Book(Long.MIN_VALUE, "Winnign a prize for creative test strings", 0xBADF00);
+			Book b5 = new Book(Long.MIN_VALUE, "Winning a prize for creative test strings", 0xBADF00);
 			Book b6 = new Book(-2L, "Distributed Systems", 0xABCDEF0123456789L);
 			
 			runTests(	new Book[] {b1, b2},

http://git-wip-us.apache.org/repos/asf/flink/blob/423f4d65/flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/AbstractGenericTypeComparatorTest.java
----------------------------------------------------------------------
diff --git a/flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/AbstractGenericTypeComparatorTest.java b/flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/AbstractGenericTypeComparatorTest.java
index ef168d8..44464a3 100644
--- a/flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/AbstractGenericTypeComparatorTest.java
+++ b/flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/AbstractGenericTypeComparatorTest.java
@@ -30,16 +30,14 @@ abstract public class AbstractGenericTypeComparatorTest {
 
 	@Test
 	public void testString() {
-		runTests(new String[]{
-				"",
+		runTests("",
 				"Lorem Ipsum Dolor Omit Longer",
 				"aaaa",
 				"abcd",
 				"abce",
 				"abdd",
 				"accd",
-				"bbcd"
-		});
+				"bbcd");
 	}
 
 	@Test

http://git-wip-us.apache.org/repos/asf/flink/blob/423f4d65/flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/AbstractGenericTypeSerializerTest.java
----------------------------------------------------------------------
diff --git a/flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/AbstractGenericTypeSerializerTest.java b/flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/AbstractGenericTypeSerializerTest.java
index 059c78d..a75208a 100644
--- a/flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/AbstractGenericTypeSerializerTest.java
+++ b/flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/AbstractGenericTypeSerializerTest.java
@@ -88,7 +88,7 @@ abstract public class AbstractGenericTypeSerializerTest {
 	@Test
 	public void testBeanStyleObjects() {
 		{
-			Book b1 = new Book(976243875L, "The Serialization Odysse", 42);
+			Book b1 = new Book(976243875L, "The Serialization Odyssey", 42);
 			Book b2 = new Book(0L, "Debugging byte streams", 1337);
 			Book b3 = new Book(-1L, "Low level interfaces", 0xC0FFEE);
 

http://git-wip-us.apache.org/repos/asf/flink/blob/423f4d65/flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/kryo/SerializersTest.java
----------------------------------------------------------------------
diff --git a/flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/kryo/SerializersTest.java b/flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/kryo/SerializersTest.java
index 7c6d023..4a39f8f 100644
--- a/flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/kryo/SerializersTest.java
+++ b/flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/kryo/SerializersTest.java
@@ -46,7 +46,7 @@ public class SerializersTest {
 	
 	public static class Nested1 {
 		private FromNested fromNested;
-		private Path yodaIntervall;
+		private Path yodaInterval;
 	}
 
 	public static class ClassWithNested {

http://git-wip-us.apache.org/repos/asf/flink/blob/423f4d65/flink-core/src/test/java/org/apache/flink/types/BasicTypeInfoTest.java
----------------------------------------------------------------------
diff --git a/flink-core/src/test/java/org/apache/flink/types/BasicTypeInfoTest.java b/flink-core/src/test/java/org/apache/flink/types/BasicTypeInfoTest.java
index c090b76..5707701 100644
--- a/flink-core/src/test/java/org/apache/flink/types/BasicTypeInfoTest.java
+++ b/flink-core/src/test/java/org/apache/flink/types/BasicTypeInfoTest.java
@@ -11,22 +11,23 @@
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WNTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 
 package org.apache.flink.types;
 
-import java.math.BigDecimal;
-import java.math.BigInteger;
 import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
 import org.apache.flink.util.TestLogger;
 import org.junit.Test;
 
+import java.math.BigDecimal;
+import java.math.BigInteger;
 import java.util.Date;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
 
 public class BasicTypeInfoTest extends TestLogger {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/423f4d65/flink-java/src/main/java/org/apache/flink/api/java/operators/Grouping.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/operators/Grouping.java b/flink-java/src/main/java/org/apache/flink/api/java/operators/Grouping.java
index 8601c33..dbaaa9d 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/operators/Grouping.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/operators/Grouping.java
@@ -29,7 +29,7 @@ import org.apache.flink.api.java.DataSet;
  * Grouping is an intermediate step for a transformation on a grouped DataSet.<br>
  * The following transformation can be applied on Grouping:
  * <ul>
- * 	<li>{@link UnsortedGrouping#reduce(org.apache.flink.api.common.functions.ReduceFunction)},</li>
+ * <li>{@link UnsortedGrouping#reduce(org.apache.flink.api.common.functions.ReduceFunction)},</li>
  * <li>{@link UnsortedGrouping#reduceGroup(org.apache.flink.api.common.functions.GroupReduceFunction)}, and</li>
  * <li>{@link UnsortedGrouping#aggregate(org.apache.flink.api.java.aggregation.Aggregations, int)}.</li>
  * </ul>

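Note: for context, the transformations listed in that Javadoc are used roughly as sketched below; the data and class name are illustrative, not part of this commit.

    import org.apache.flink.api.common.functions.ReduceFunction;
    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.aggregation.Aggregations;
    import org.apache.flink.api.java.tuple.Tuple2;

    public class GroupingExample {
        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
            DataSet<Tuple2<String, Integer>> words = env.fromElements(
                    Tuple2.of("a", 1), Tuple2.of("b", 2), Tuple2.of("a", 3));

            // UnsortedGrouping#reduce: sum the counts per key
            DataSet<Tuple2<String, Integer>> summed = words.groupBy(0)
                    .reduce(new ReduceFunction<Tuple2<String, Integer>>() {
                        @Override
                        public Tuple2<String, Integer> reduce(Tuple2<String, Integer> a,
                                                              Tuple2<String, Integer> b) {
                            return Tuple2.of(a.f0, a.f1 + b.f1);
                        }
                    });

            // UnsortedGrouping#aggregate: maximum of field 1 per key
            DataSet<Tuple2<String, Integer>> maxPerKey = words.groupBy(0).aggregate(Aggregations.MAX, 1);

            summed.print();
            maxPerKey.print();
        }
    }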
http://git-wip-us.apache.org/repos/asf/flink/blob/423f4d65/flink-java/src/test/java/org/apache/flink/api/java/functions/SelectByFunctionsTest.java
----------------------------------------------------------------------
diff --git a/flink-java/src/test/java/org/apache/flink/api/java/functions/SelectByFunctionsTest.java b/flink-java/src/test/java/org/apache/flink/api/java/functions/SelectByFunctionsTest.java
index f40c0ca..52b59ec 100644
--- a/flink-java/src/test/java/org/apache/flink/api/java/functions/SelectByFunctionsTest.java
+++ b/flink-java/src/test/java/org/apache/flink/api/java/functions/SelectByFunctionsTest.java
@@ -49,7 +49,7 @@ public class SelectByFunctionsTest {
 			Assert.assertSame("SelectByMax must return bigger tuple", bigger, maxByTuple.reduce(smaller, bigger));
 			Assert.assertSame("SelectByMax must return bigger tuple", bigger, maxByTuple.reduce(bigger, smaller));
 		} catch (Exception e) {
-			Assert.fail("No exception should be thrown while comapring both tuples");
+			Assert.fail("No exception should be thrown while comparing both tuples");
 		}
 	}
 	
@@ -67,7 +67,7 @@ public class SelectByFunctionsTest {
 			Assert.assertSame("SelectByMax must return the first given tuple", specialCaseBigger, maxByTuple.reduce(specialCaseBigger, bigger));
 			Assert.assertSame("SelectByMax must return the first given tuple", bigger, maxByTuple.reduce(bigger, specialCaseBigger));
 		} catch (Exception e) {
-			Assert.fail("No exception should be thrown while comapring both tuples");
+			Assert.fail("No exception should be thrown while comparing both tuples");
 		}
 	}
 	
@@ -82,7 +82,7 @@ public class SelectByFunctionsTest {
 			Assert.assertSame("SelectByMax must return bigger tuple", bigger, maxByTuple.reduce(specialCaseBigger, bigger));
 			Assert.assertSame("SelectByMax must return bigger tuple", bigger, maxByTuple.reduce(bigger, specialCaseBigger));
 		} catch (Exception e) {
-			Assert.fail("No exception should be thrown while comapring both tuples");
+			Assert.fail("No exception should be thrown while comparing both tuples");
 		}
 	}
 	
@@ -97,7 +97,7 @@ public class SelectByFunctionsTest {
 			Assert.assertSame("SelectByMax must return bigger tuple", bigger, maxByTuple.reduce(smaller, bigger));
 			Assert.assertSame("SelectByMax must return bigger tuple", bigger, maxByTuple.reduce(bigger, smaller));
 		} catch (Exception e) {
-			Assert.fail("No exception should be thrown while comapring both tuples");
+			Assert.fail("No exception should be thrown while comparing both tuples");
 		}
 	}
 	
@@ -112,7 +112,7 @@ public class SelectByFunctionsTest {
 			Assert.assertSame("SelectByMax must return bigger tuple", bigger, maxByTuple.reduce(bigger, bigger));
 			Assert.assertSame("SelectByMax must return smaller tuple", smaller, maxByTuple.reduce(smaller, smaller));
 		} catch (Exception e) {
-			Assert.fail("No exception should be thrown while comapring both tuples");
+			Assert.fail("No exception should be thrown while comparing both tuples");
 		}
 	}
 	
@@ -129,7 +129,7 @@ public class SelectByFunctionsTest {
 			Assert.assertSame("SelectByMin must return smaller tuple", smaller, minByTuple.reduce(smaller, bigger));
 			Assert.assertSame("SelectByMin must return smaller tuple", smaller, minByTuple.reduce(bigger, smaller));
 		} catch (Exception e) {
-			Assert.fail("No exception should be thrown while comapring both tuples");
+			Assert.fail("No exception should be thrown while comparing both tuples");
 		}
 	}
 	
@@ -145,7 +145,7 @@ public class SelectByFunctionsTest {
 			Assert.assertSame("SelectByMin must return the first given tuple", specialCaseBigger, minByTuple.reduce(specialCaseBigger, bigger));
 			Assert.assertSame("SelectByMin must return the first given tuple", bigger, minByTuple.reduce(bigger, specialCaseBigger));
 		} catch (Exception e) {
-			Assert.fail("No exception should be thrown while comapring both tuples");
+			Assert.fail("No exception should be thrown while comparing both tuples");
 		}
 	}
 	
@@ -161,7 +161,7 @@ public class SelectByFunctionsTest {
 			Assert.assertSame("SelectByMin must return smaller tuple", smaller, minByTuple.reduce(specialCaseSmaller, smaller));
 			Assert.assertSame("SelectByMin must return smaller tuple", smaller, minByTuple.reduce(smaller, specialCaseSmaller));
 		} catch (Exception e) {
-			Assert.fail("No exception should be thrown while comapring both tuples");
+			Assert.fail("No exception should be thrown while comparing both tuples");
 		}
 	}
 	
@@ -176,7 +176,7 @@ public class SelectByFunctionsTest {
 			Assert.assertSame("SelectByMin must return smaller tuple", smaller, minByTuple.reduce(smaller, bigger));
 			Assert.assertSame("SelectByMin must return smaller tuple", smaller, minByTuple.reduce(bigger, smaller));
 		} catch (Exception e) {
-			Assert.fail("No exception should be thrown while comapring both tuples");
+			Assert.fail("No exception should be thrown while comparing both tuples");
 		}
 	}
 	

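Note: the select-by functions exercised above back DataSet#maxBy/minBy. A hedged usage sketch follows; the data and class name are illustrative only.

    import org.apache.flink.api.java.DataSet;
    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.api.java.tuple.Tuple2;

    public class MinMaxByExample {
        public static void main(String[] args) throws Exception {
            ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
            DataSet<Tuple2<String, Integer>> data = env.fromElements(
                    Tuple2.of("a", 5), Tuple2.of("a", 2), Tuple2.of("b", 7));

            // per key (field 0), keep the tuple with the largest value in field 1
            data.groupBy(0).maxBy(1).print();

            // across the whole DataSet, keep the tuple with the smallest value in field 1
            data.minBy(1).print();
        }
    }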
http://git-wip-us.apache.org/repos/asf/flink/blob/423f4d65/flink-java/src/test/java/org/apache/flink/api/java/functions/SemanticPropUtilTest.java
----------------------------------------------------------------------
diff --git a/flink-java/src/test/java/org/apache/flink/api/java/functions/SemanticPropUtilTest.java b/flink-java/src/test/java/org/apache/flink/api/java/functions/SemanticPropUtilTest.java
index b845e73..453c022 100644
--- a/flink-java/src/test/java/org/apache/flink/api/java/functions/SemanticPropUtilTest.java
+++ b/flink-java/src/test/java/org/apache/flink/api/java/functions/SemanticPropUtilTest.java
@@ -324,7 +324,7 @@ public class SemanticPropUtilTest {
 	// --------------------------------------------------------------------------------------------
 
 	@Test
-	public void testForwardedNoArrrowIndividualStrings() {
+	public void testForwardedNoArrowIndividualStrings() {
 		String[] forwardedFields = {"f2","f3","f0"};
 		SingleInputSemanticProperties sp = new SingleInputSemanticProperties();
 		SemanticPropUtil.getSemanticPropsSingleFromString(sp, forwardedFields, null, null, fiveIntTupleType, fiveIntTupleType);
@@ -335,7 +335,7 @@ public class SemanticPropUtilTest {
 	}
 
 	@Test
-	public void testForwardedNoArrrowOneString() {
+	public void testForwardedNoArrowOneString() {
 		String[] forwardedFields = {"f2;f3;f0"};
 		SingleInputSemanticProperties sp = new SingleInputSemanticProperties();
 		SemanticPropUtil.getSemanticPropsSingleFromString(sp, forwardedFields, null, null, fiveIntTupleType, fiveIntTupleType);
@@ -360,7 +360,7 @@ public class SemanticPropUtilTest {
 	}
 
 	@Test
-	public void testForwardedNoArrrowSpaces() {
+	public void testForwardedNoArrowSpaces() {
 		String[] forwardedFields = {"  f2  ;   f3  ;  f0   "};
 		SingleInputSemanticProperties sp = new SingleInputSemanticProperties();
 		SemanticPropUtil.getSemanticPropsSingleFromString(sp, forwardedFields, null, null, fiveIntTupleType, fiveIntTupleType);
@@ -1219,11 +1219,11 @@ public class SemanticPropUtilTest {
 
 	@Test
 	public void testNonForwardedDual() {
-		String[] nonNorwardedFieldsFirst = { "f1;f2" };
-		String[] nonNorwardedFieldsSecond = { "f0" };
+		String[] nonForwardedFieldsFirst = { "f1;f2" };
+		String[] nonForwardedFieldsSecond = { "f0" };
 		DualInputSemanticProperties dsp = new DualInputSemanticProperties();
 		SemanticPropUtil.getSemanticPropsDualFromString(dsp, null, null,
-				nonNorwardedFieldsFirst, nonNorwardedFieldsSecond, null, null, threeIntTupleType, threeIntTupleType, threeIntTupleType);
+				nonForwardedFieldsFirst, nonForwardedFieldsSecond, null, null, threeIntTupleType, threeIntTupleType, threeIntTupleType);
 
 		assertTrue(dsp.getForwardingTargetFields(0, 0).contains(0));
 		assertTrue(dsp.getForwardingTargetFields(0, 1).size() == 0);
@@ -1232,11 +1232,11 @@ public class SemanticPropUtilTest {
 		assertTrue(dsp.getForwardingTargetFields(1, 1).contains(1));
 		assertTrue(dsp.getForwardingTargetFields(1, 2).contains(2));
 
-		nonNorwardedFieldsFirst[0] = "f1";
-		nonNorwardedFieldsSecond[0] = "";
+		nonForwardedFieldsFirst[0] = "f1";
+		nonForwardedFieldsSecond[0] = "";
 		dsp = new DualInputSemanticProperties();
 		SemanticPropUtil.getSemanticPropsDualFromString(dsp, null, null,
-				nonNorwardedFieldsFirst, null, null, null, threeIntTupleType, fiveIntTupleType, threeIntTupleType);
+				nonForwardedFieldsFirst, null, null, null, threeIntTupleType, fiveIntTupleType, threeIntTupleType);
 
 		assertTrue(dsp.getForwardingTargetFields(0, 0).contains(0));
 		assertTrue(dsp.getForwardingTargetFields(0, 1).size() == 0);
@@ -1245,11 +1245,11 @@ public class SemanticPropUtilTest {
 		assertTrue(dsp.getForwardingTargetFields(1, 1).size() == 0);
 		assertTrue(dsp.getForwardingTargetFields(1, 2).size() == 0);
 
-		nonNorwardedFieldsFirst[0] = "";
-		nonNorwardedFieldsSecond[0] = "f2;f0";
+		nonForwardedFieldsFirst[0] = "";
+		nonForwardedFieldsSecond[0] = "f2;f0";
 		dsp = new DualInputSemanticProperties();
 		SemanticPropUtil.getSemanticPropsDualFromString(dsp, null, null,
-				null, nonNorwardedFieldsSecond, null, null, fiveIntTupleType, threeIntTupleType, threeIntTupleType);
+				null, nonForwardedFieldsSecond, null, null, fiveIntTupleType, threeIntTupleType, threeIntTupleType);
 
 		assertTrue(dsp.getForwardingTargetFields(0, 0).size() == 0);
 		assertTrue(dsp.getForwardingTargetFields(0, 1).size() == 0);
@@ -1283,19 +1283,19 @@ public class SemanticPropUtilTest {
 	@Test(expected = InvalidSemanticAnnotationException.class)
 	public void testNonForwardedDualInvalidTypes1() {
 
-		String[] nonNorwardedFieldsFirst = { "f1" };
+		String[] nonForwardedFieldsFirst = { "f1" };
 		DualInputSemanticProperties dsp = new DualInputSemanticProperties();
 		SemanticPropUtil.getSemanticPropsDualFromString(dsp, null, null,
-				nonNorwardedFieldsFirst, null, null, null, fiveIntTupleType, threeIntTupleType, threeIntTupleType);
+				nonForwardedFieldsFirst, null, null, null, fiveIntTupleType, threeIntTupleType, threeIntTupleType);
 	}
 
 	@Test(expected = InvalidSemanticAnnotationException.class)
 	public void testNonForwardedDualInvalidTypes2() {
 
-		String[] nonNorwardedFieldsSecond = { "f1" };
+		String[] nonForwardedFieldsSecond = { "f1" };
 		DualInputSemanticProperties dsp = new DualInputSemanticProperties();
 		SemanticPropUtil.getSemanticPropsDualFromString(dsp, null, null,
-				null, nonNorwardedFieldsSecond, null, null, threeIntTupleType, pojoInTupleType, threeIntTupleType);
+				null, nonForwardedFieldsSecond, null, null, threeIntTupleType, pojoInTupleType, threeIntTupleType);
 	}
 
 	@Test

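Note: the "f2;f3;f0"-style forwarded-field strings tested here are the same expressions accepted by the function annotations. A hedged sketch, with an invented function, is below.

    import org.apache.flink.api.common.functions.MapFunction;
    import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFields;
    import org.apache.flink.api.java.tuple.Tuple3;

    // declares that fields f0 and f2 reach the output unchanged and in place
    @ForwardedFields("f0;f2")
    public class DoubleMiddleField
            implements MapFunction<Tuple3<Integer, Integer, Integer>, Tuple3<Integer, Integer, Integer>> {
        @Override
        public Tuple3<Integer, Integer, Integer> map(Tuple3<Integer, Integer, Integer> value) {
            return new Tuple3<>(value.f0, value.f1 * 2, value.f2);
        }
    }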
http://git-wip-us.apache.org/repos/asf/flink/blob/423f4d65/flink-optimizer/src/main/java/org/apache/flink/optimizer/traversals/GraphCreatingVisitor.java
----------------------------------------------------------------------
diff --git a/flink-optimizer/src/main/java/org/apache/flink/optimizer/traversals/GraphCreatingVisitor.java b/flink-optimizer/src/main/java/org/apache/flink/optimizer/traversals/GraphCreatingVisitor.java
index 3f3eae1..92217b5 100644
--- a/flink-optimizer/src/main/java/org/apache/flink/optimizer/traversals/GraphCreatingVisitor.java
+++ b/flink-optimizer/src/main/java/org/apache/flink/optimizer/traversals/GraphCreatingVisitor.java
@@ -76,8 +76,8 @@ import java.util.Map;
 /**
  * This traversal creates the optimizer DAG from a program.
  * It works as a visitor that walks the program's flow in a depth-first fashion, starting from the data sinks.
- * During the descend, it creates an optimizer node for each operator, respectively data source or -sink.
- * During the ascend, it connects the nodes to the full graph.
+ * During the descent it creates an optimizer node for each operator, respectively data source or sink.
+ * During the ascent it connects the nodes to the full graph.
  */
 public class GraphCreatingVisitor implements Visitor<Operator<?>> {
 

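Note: the descent/ascent wording in that Javadoc maps onto the pre-/post-visit callbacks of Flink's Visitor interface. A sketch of that contract follows; the counting logic is made up and is not the optimizer's actual behavior.

    import org.apache.flink.api.common.operators.Operator;
    import org.apache.flink.util.Visitor;

    public class CountingVisitor implements Visitor<Operator<?>> {
        private int nodes;

        @Override
        public boolean preVisit(Operator<?> visitable) {  // invoked during the descent
            nodes++;
            return true;  // true continues the traversal into the operator's inputs
        }

        @Override
        public void postVisit(Operator<?> visitable) {    // invoked during the ascent
            // GraphCreatingVisitor connects the created optimizer nodes to the graph at this point
        }
    }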
http://git-wip-us.apache.org/repos/asf/flink/blob/423f4d65/flink-runtime/src/main/java/org/apache/flink/runtime/util/SerializedThrowable.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/util/SerializedThrowable.java b/flink-runtime/src/main/java/org/apache/flink/runtime/util/SerializedThrowable.java
index f6f08e4..63f4363 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/util/SerializedThrowable.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/util/SerializedThrowable.java
@@ -48,7 +48,7 @@ public class SerializedThrowable extends Exception implements Serializable {
 	private final String originalErrorClassName;
 	
 	/** The original stack trace, to be printed */
-	private final String fullStingifiedStackTrace;
+	private final String fullStringifiedStackTrace;
 
 	/** The original exception, not transported via serialization, 
 	 * because the class may not be part of the system class loader.
@@ -83,7 +83,7 @@ public class SerializedThrowable extends Exception implements Serializable {
 
 			// record the original exception's properties (name, stack prints)
 			this.originalErrorClassName = exception.getClass().getName();
-			this.fullStingifiedStackTrace = ExceptionUtils.stringifyException(exception);
+			this.fullStringifiedStackTrace = ExceptionUtils.stringifyException(exception);
 
 			// mimic the original exception's stack trace
 			setStackTrace(exception.getStackTrace());
@@ -106,7 +106,7 @@ public class SerializedThrowable extends Exception implements Serializable {
 			SerializedThrowable other = (SerializedThrowable) exception;
 			this.serializedException = other.serializedException;
 			this.originalErrorClassName = other.originalErrorClassName;
-			this.fullStingifiedStackTrace = other.fullStingifiedStackTrace;
+			this.fullStringifiedStackTrace = other.fullStringifiedStackTrace;
 			this.cachedException = other.cachedException;
 			this.setStackTrace(other.getStackTrace());
 			this.initCause(other.getCause());
@@ -145,13 +145,13 @@ public class SerializedThrowable extends Exception implements Serializable {
 
 	@Override
 	public void printStackTrace(PrintStream s) {
-		s.print(fullStingifiedStackTrace);
+		s.print(fullStringifiedStackTrace);
 		s.flush();
 	}
 	
 	@Override
 	public void printStackTrace(PrintWriter s) {
-		s.print(fullStingifiedStackTrace);
+		s.print(fullStringifiedStackTrace);
 		s.flush();
 	}
 	

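Note: with the field rename, printStackTrace still emits the cached stringified trace. A hedged usage sketch; the wrapper class and the thrown exception are illustrative.

    import org.apache.flink.runtime.util.SerializedThrowable;

    public class SerializedThrowableExample {
        public static void main(String[] args) {
            try {
                throw new IllegalStateException("boom");  // illustrative failure
            } catch (Exception e) {
                // keeps the original class name and the stringified stack trace, so the error
                // stays printable even where the original exception class is not loadable
                SerializedThrowable serialized = new SerializedThrowable(e);
                serialized.printStackTrace(System.err);   // prints fullStringifiedStackTrace
            }
        }
    }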
http://git-wip-us.apache.org/repos/asf/flink/blob/423f4d65/flink-runtime/src/test/java/org/apache/flink/runtime/jobmanager/scheduler/SchedulerSlotSharingTest.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/test/java/org/apache/flink/runtime/jobmanager/scheduler/SchedulerSlotSharingTest.java b/flink-runtime/src/test/java/org/apache/flink/runtime/jobmanager/scheduler/SchedulerSlotSharingTest.java
index 5238e95..c049593 100644
--- a/flink-runtime/src/test/java/org/apache/flink/runtime/jobmanager/scheduler/SchedulerSlotSharingTest.java
+++ b/flink-runtime/src/test/java/org/apache/flink/runtime/jobmanager/scheduler/SchedulerSlotSharingTest.java
@@ -320,7 +320,7 @@ public class SchedulerSlotSharingTest {
 	}
 	
 	@Test
-	public void allocateSlotWithTemprarilyEmptyVertexGroup() {
+	public void allocateSlotWithTemporarilyEmptyVertexGroup() {
 		try {
 			JobVertexID jid1 = new JobVertexID();
 			JobVertexID jid2 = new JobVertexID();

http://git-wip-us.apache.org/repos/asf/flink/blob/423f4d65/flink-scala/src/test/scala/org/apache/flink/api/operator/SelectByFunctionTest.scala
----------------------------------------------------------------------
diff --git a/flink-scala/src/test/scala/org/apache/flink/api/operator/SelectByFunctionTest.scala b/flink-scala/src/test/scala/org/apache/flink/api/operator/SelectByFunctionTest.scala
index 291df79..d6af6bd 100644
--- a/flink-scala/src/test/scala/org/apache/flink/api/operator/SelectByFunctionTest.scala
+++ b/flink-scala/src/test/scala/org/apache/flink/api/operator/SelectByFunctionTest.scala
@@ -54,7 +54,7 @@ class SelectByFunctionTest {
           bigger, maxByTuple.reduce(bigger, smaller))
       } catch {
         case e : Exception =>
-          Assert.fail("No exception should be thrown while comapring both tuples")
+          Assert.fail("No exception should be thrown while comparing both tuples")
       }
   }
 
@@ -77,7 +77,7 @@ class SelectByFunctionTest {
         bigger, maxByTuple.reduce(bigger, specialCaseBigger))
     } catch {
       case e : Exception => Assert.fail("No exception should be thrown " +
-        "while comapring both tuples")
+        "while comparing both tuples")
     }
   }
 
@@ -95,7 +95,7 @@ class SelectByFunctionTest {
         bigger, maxByTuple.reduce(bigger, specialCaseBigger))
     } catch {
       case e : Exception => Assert.fail("No exception should be thrown" +
-        " while comapring both tuples")
+        " while comparing both tuples")
     }
   }
 
@@ -113,7 +113,7 @@ class SelectByFunctionTest {
         bigger, maxByTuple.reduce(bigger, smaller))
     } catch {
       case e : Exception => Assert.fail("No exception should be thrown " +
-        "while comapring both tuples")
+        "while comparing both tuples")
     }
   }
 
@@ -132,7 +132,7 @@ class SelectByFunctionTest {
         smaller, maxByTuple.reduce(smaller, smaller))
     } catch {
       case e : Exception => Assert.fail("No exception should be thrown" +
-        " while comapring both tuples")
+        " while comparing both tuples")
     }
   }
 
@@ -153,7 +153,7 @@ class SelectByFunctionTest {
         smaller, minByTuple.reduce(bigger, smaller))
     } catch {
       case e : Exception => Assert.fail("No exception should be thrown " +
-        "while comapring both tuples")
+        "while comparing both tuples")
     }
   }
 
@@ -174,7 +174,7 @@ class SelectByFunctionTest {
         bigger, minByTuple.reduce(bigger, specialCaseBigger))
     } catch {
       case e : Exception => Assert.fail("No exception should be thrown " +
-        "while comapring both tuples")
+        "while comparing both tuples")
     }
   }
 
@@ -195,7 +195,7 @@ class SelectByFunctionTest {
         smaller, minByTuple.reduce(smaller, specialCaseSmaller))
     } catch {
       case e : Exception => Assert.fail("No exception should be thrown" +
-        " while comapring both tuples")
+        " while comparing both tuples")
     }
   }
 
@@ -213,7 +213,7 @@ class SelectByFunctionTest {
         smaller, minByTuple.reduce(bigger, smaller))
     } catch {
       case e : Exception => Assert.fail("No exception should be thrown" +
-        " while comapring both tuples")
+        " while comparing both tuples")
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/flink/blob/423f4d65/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/AbstractStreamOperatorTest.java
----------------------------------------------------------------------
diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/AbstractStreamOperatorTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/AbstractStreamOperatorTest.java
index 774ab4a..67004ea 100644
--- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/AbstractStreamOperatorTest.java
+++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/AbstractStreamOperatorTest.java
@@ -17,28 +17,6 @@
  */
 package org.apache.flink.streaming.api.operators;
 
-import static junit.framework.TestCase.assertTrue;
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.contains;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyBoolean;
-import static org.mockito.Matchers.anyLong;
-import static org.mockito.Matchers.eq;
-import static org.mockito.Mockito.doThrow;
-import static org.mockito.Mockito.verify;
-import static org.powermock.api.mockito.PowerMockito.doReturn;
-import static org.powermock.api.mockito.PowerMockito.mock;
-import static org.powermock.api.mockito.PowerMockito.spy;
-import static org.powermock.api.mockito.PowerMockito.when;
-import static org.powermock.api.mockito.PowerMockito.whenNew;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Random;
-import java.util.concurrent.RunnableFuture;
 import org.apache.flink.api.common.state.ValueStateDescriptor;
 import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
 import org.apache.flink.api.common.typeutils.base.IntSerializer;
@@ -71,6 +49,29 @@ import org.mockito.internal.util.reflection.Whitebox;
 import org.powermock.core.classloader.annotations.PrepareForTest;
 import org.powermock.modules.junit4.PowerMockRunner;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+import java.util.concurrent.RunnableFuture;
+
+import static junit.framework.TestCase.assertTrue;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.contains;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyBoolean;
+import static org.mockito.Matchers.anyLong;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.verify;
+import static org.powermock.api.mockito.PowerMockito.doReturn;
+import static org.powermock.api.mockito.PowerMockito.mock;
+import static org.powermock.api.mockito.PowerMockito.spy;
+import static org.powermock.api.mockito.PowerMockito.when;
+import static org.powermock.api.mockito.PowerMockito.whenNew;
+
 /**
  * Tests for the facilities provided by {@link AbstractStreamOperator}. This mostly
  * tests timers and state and whether they are correctly checkpointed/restored
@@ -510,7 +511,7 @@ public class AbstractStreamOperatorTest {
 
 	/**
 	 * Tests that the created StateSnapshotContextSynchronousImpl is closed in case of a failing
-	 * Operator#snapshotState(StaetSnapshotContextSynchronousImpl) call.
+	 * Operator#snapshotState(StateSnapshotContextSynchronousImpl) call.
 	 */
 	@Test
 	public void testFailingSnapshotMethod() throws Exception {

http://git-wip-us.apache.org/repos/asf/flink/blob/423f4d65/flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/io/BarrierTrackerTest.java
----------------------------------------------------------------------
diff --git a/flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/io/BarrierTrackerTest.java b/flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/io/BarrierTrackerTest.java
index cbcf45d..8c66205 100644
--- a/flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/io/BarrierTrackerTest.java
+++ b/flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/io/BarrierTrackerTest.java
@@ -526,7 +526,7 @@ public class BarrierTrackerTest {
 
 			final long expectedId = checkpointIDs[i++];
 			if (expectedId < 0) {
-				assertEquals("wrong checkpoint id for checkoint abort", -expectedId, checkpointId);
+				assertEquals("wrong checkpoint id for checkpoint abort", -expectedId, checkpointId);
 			} else {
 				fail("got 'abortCheckpointOnBarrier()' when expecting an 'triggerCheckpointOnBarrier()'");
 			}

http://git-wip-us.apache.org/repos/asf/flink/blob/423f4d65/flink-tests/src/test/java/org/apache/flink/test/accumulators/AccumulatorITCase.java
----------------------------------------------------------------------
diff --git a/flink-tests/src/test/java/org/apache/flink/test/accumulators/AccumulatorITCase.java b/flink-tests/src/test/java/org/apache/flink/test/accumulators/AccumulatorITCase.java
index b4015e5..5f2b0a9 100644
--- a/flink-tests/src/test/java/org/apache/flink/test/accumulators/AccumulatorITCase.java
+++ b/flink-tests/src/test/java/org/apache/flink/test/accumulators/AccumulatorITCase.java
@@ -74,7 +74,7 @@ public class AccumulatorITCase extends JavaProgramTestBase {
 		// Test accumulator results
 		System.out.println("Accumulator results:");
 		JobExecutionResult res = this.result;
-		System.out.println(AccumulatorHelper.getResultsFormated(res.getAllAccumulatorResults()));
+		System.out.println(AccumulatorHelper.getResultsFormatted(res.getAllAccumulatorResults()));
 
 		Assert.assertEquals(Integer.valueOf(3), (Integer) res.getAccumulatorResult("num-lines"));