Posted to commits@flink.apache.org by ch...@apache.org on 2017/05/26 19:16:10 UTC

[01/15] flink git commit: [FLINK-6721] Activate strict checkstyle for flink-fs-tests

Repository: flink
Updated Branches:
  refs/heads/master 63f182a4f -> 77b0fb9fe


[FLINK-6721] Activate strict checkstyle for flink-fs-tests

This closes #3991.


Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/6445da02
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/6445da02
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/6445da02

Branch: refs/heads/master
Commit: 6445da02b3785e239debbb935ea4e284d751d934
Parents: 63f182a
Author: zentol <ch...@apache.org>
Authored: Thu May 25 21:30:32 2017 +0200
Committer: zentol <ch...@apache.org>
Committed: Fri May 26 17:41:31 2017 +0200

----------------------------------------------------------------------
 flink-fs-tests/pom.xml                          | 35 ++++++++++++++++
 ...inuousFileProcessingFrom11MigrationTest.java | 13 +++---
 ...inuousFileProcessingFrom12MigrationTest.java | 15 ++++---
 .../ContinuousFileProcessingITCase.java         | 17 +++++---
 .../hdfstests/ContinuousFileProcessingTest.java | 21 +++++-----
 .../flink/hdfstests/FileStateBackendTest.java   | 43 +++++++++++---------
 .../FsNegativeRunningJobsRegistryTest.java      | 27 ++++++------
 .../org/apache/flink/hdfstests/HDFSTest.java    | 31 +++++++-------
 .../src/test/resources/log4j-test.properties    |  2 +-
 9 files changed, 126 insertions(+), 78 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/6445da02/flink-fs-tests/pom.xml
----------------------------------------------------------------------
diff --git a/flink-fs-tests/pom.xml b/flink-fs-tests/pom.xml
index 446d167..972a452 100644
--- a/flink-fs-tests/pom.xml
+++ b/flink-fs-tests/pom.xml
@@ -110,6 +110,41 @@ under the License.
 					</environmentVariables>
 				</configuration>
 			</plugin>
+
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-checkstyle-plugin</artifactId>
+				<version>2.17</version>
+				<dependencies>
+					<dependency>
+						<groupId>com.puppycrawl.tools</groupId>
+						<artifactId>checkstyle</artifactId>
+						<version>6.19</version>
+					</dependency>
+				</dependencies>
+				<configuration>
+					<configLocation>/tools/maven/strict-checkstyle.xml</configLocation>
+					<suppressionsLocation>/tools/maven/suppressions.xml</suppressionsLocation>
+					<includeTestSourceDirectory>true</includeTestSourceDirectory>
+					<logViolationsToConsole>true</logViolationsToConsole>
+					<failOnViolation>true</failOnViolation>
+				</configuration>
+				<executions>
+					<!--
+					Execute checkstyle after compilation but before tests.
+
+					This ensures that any parsing or type checking errors are from
+					javac, so they look as expected. Beyond that, we want to
+					fail as early as possible.
+					-->
+					<execution>
+						<phase>test-compile</phase>
+						<goals>
+							<goal>check</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
 		</plugins>
 	</build>
 

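With the plugin above bound to the test-compile phase and failOnViolation enabled, a plain "mvn test-compile" run of this module now fails on checkstyle violations. The Java hunks that follow are mechanical fixes for the strict rule set: spaces around binary operators and after keywords, JLS modifier order ("abstract static", not "static abstract"), no doubled blank lines, and a Javadoc comment on every top-level type. A minimal sketch of a test class shape that passes those checks (class and method names here are illustrative, not taken from the patch):

	/**
	 * Tests for the hypothetical WidgetReader.
	 */
	public class WidgetReaderTest {

		// 'abstract' must precede 'static' to satisfy the modifier-order check
		private abstract static class DummySourceContext {
		}

		private String formatLine(int fileIdx, String sampleLine, int i) {
			// the whitespace check requires spaces around every '+'
			return fileIdx + ": " + sampleLine + " " + i + "\n";
		}
	}
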
http://git-wip-us.apache.org/repos/asf/flink/blob/6445da02/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingFrom11MigrationTest.java
----------------------------------------------------------------------
diff --git a/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingFrom11MigrationTest.java b/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingFrom11MigrationTest.java
index ec5e1ad..05f4916 100644
--- a/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingFrom11MigrationTest.java
+++ b/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingFrom11MigrationTest.java
@@ -40,6 +40,7 @@ import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
 import org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness;
 import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness;
 import org.apache.flink.util.Preconditions;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -56,6 +57,9 @@ import java.io.File;
 import java.io.IOException;
 import java.net.URL;
 
+/**
+ * Tests that verify the migration from 1.1 snapshots.
+ */
 public class ContinuousFileProcessingFrom11MigrationTest {
 
 	private static final int NO_OF_FILES = 5;
@@ -87,10 +91,10 @@ public class ContinuousFileProcessingFrom11MigrationTest {
 			MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(hdConf);
 			hdfsCluster = builder.build();
 
-			hdfsURI = "hdfs://" + hdfsCluster.getURI().getHost() + ":" + hdfsCluster.getNameNodePort() +"/";
+			hdfsURI = "hdfs://" + hdfsCluster.getURI().getHost() + ":" + hdfsCluster.getNameNodePort() + "/";
 			hdfs = new org.apache.hadoop.fs.Path(hdfsURI).getFileSystem(hdConf);
 
-		} catch(Throwable e) {
+		} catch (Throwable e) {
 			e.printStackTrace();
 			Assert.fail("Test failed " + e.getMessage());
 		}
@@ -167,7 +171,6 @@ public class ContinuousFileProcessingFrom11MigrationTest {
 		TimestampedFileInputSplit split4 =
 			new TimestampedFileInputSplit(11, 0, new Path("test/test3"), 0, 100, null);
 
-
 		final OneShotLatch latch = new OneShotLatch();
 
 		BlockingFileInputFormat format = new BlockingFileInputFormat(latch, new Path(hdfsURI));
@@ -347,7 +350,7 @@ public class ContinuousFileProcessingFrom11MigrationTest {
 
 	///////////				Source Contexts Used by the tests				/////////////////
 
-	private static abstract class DummySourceContext
+	private abstract static class DummySourceContext
 		implements SourceFunction.SourceContext<TimestampedFileInputSplit> {
 
 		private final Object lock = new Object();
@@ -388,7 +391,7 @@ public class ContinuousFileProcessingFrom11MigrationTest {
 		FSDataOutputStream stream = hdfs.create(tmp);
 		StringBuilder str = new StringBuilder();
 		for (int i = 0; i < LINES_PER_FILE; i++) {
-			String line = fileIdx +": "+ sampleLine + " " + i +"\n";
+			String line = fileIdx + ": " + sampleLine + " " + i + "\n";
 			str.append(line);
 			stream.write(line.getBytes(ConfigConstants.DEFAULT_CHARSET));
 		}

http://git-wip-us.apache.org/repos/asf/flink/blob/6445da02/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingFrom12MigrationTest.java
----------------------------------------------------------------------
diff --git a/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingFrom12MigrationTest.java b/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingFrom12MigrationTest.java
index bf09447..8490a62 100644
--- a/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingFrom12MigrationTest.java
+++ b/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingFrom12MigrationTest.java
@@ -18,8 +18,6 @@
 
 package org.apache.flink.hdfstests;
 
-import java.io.FileOutputStream;
-import org.apache.commons.io.FileUtils;
 import org.apache.flink.api.common.ExecutionConfig;
 import org.apache.flink.api.common.io.FileInputFormat;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
@@ -42,7 +40,8 @@ import org.apache.flink.streaming.runtime.tasks.OperatorStateHandles;
 import org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness;
 import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness;
 import org.apache.flink.streaming.util.OperatorSnapshotUtil;
-import org.apache.flink.util.Preconditions;
+
+import org.apache.commons.io.FileUtils;
 import org.junit.Assert;
 import org.junit.ClassRule;
 import org.junit.Ignore;
@@ -50,8 +49,12 @@ import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
 
 import java.io.File;
+import java.io.FileOutputStream;
 import java.io.IOException;
 
+/**
+ * Tests that verify the migration from 1.2 snapshots.
+ */
 public class ContinuousFileProcessingFrom12MigrationTest {
 
 	private static final int LINES_PER_FILE = 10;
@@ -232,7 +235,7 @@ public class ContinuousFileProcessingFrom12MigrationTest {
 
 		OperatorSnapshotUtil.writeStateHandle(
 				snapshot,
-				"src/test/resources/monitoring-function-migration-test-" + fileModTime +"-flink1.2-snapshot");
+				"src/test/resources/monitoring-function-migration-test-" + fileModTime + "-flink1.2-snapshot");
 
 		monitoringFunction.cancel();
 		runner.join();
@@ -315,7 +318,7 @@ public class ContinuousFileProcessingFrom12MigrationTest {
 		}
 	}
 
-	private static abstract class DummySourceContext
+	private abstract static class DummySourceContext
 		implements SourceFunction.SourceContext<TimestampedFileInputSplit> {
 
 		private final Object lock = new Object();
@@ -352,7 +355,7 @@ public class ContinuousFileProcessingFrom12MigrationTest {
 		FileOutputStream stream = new FileOutputStream(tmp);
 		StringBuilder str = new StringBuilder();
 		for (int i = 0; i < LINES_PER_FILE; i++) {
-			String line = fileIdx +": "+ sampleLine + " " + i +"\n";
+			String line = fileIdx + ": " + sampleLine + " " + i + "\n";
 			str.append(line);
 			stream.write(line.getBytes());
 		}

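Several files in this patch also have their imports regrouped. As the hunks above show, the strict configuration expects org.apache.flink imports first, then a blank line, then other third-party imports (commons, hadoop, junit), then java/javax, with each group sorted alphabetically. A hedged sketch of the expected layout:

	import org.apache.flink.core.fs.Path;

	import org.apache.commons.io.FileUtils;
	import org.apache.hadoop.conf.Configuration;
	import org.junit.Test;

	import java.io.File;
	import java.io.IOException;
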
http://git-wip-us.apache.org/repos/asf/flink/blob/6445da02/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingITCase.java
----------------------------------------------------------------------
diff --git a/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingITCase.java b/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingITCase.java
index bc42838..42fddf5 100644
--- a/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingITCase.java
+++ b/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingITCase.java
@@ -15,8 +15,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.flink.hdfstests;
 
+import org.apache.flink.api.common.io.FilePathFilter;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.java.io.TextInputFormat;
 import org.apache.flink.api.java.tuple.Tuple2;
@@ -27,12 +29,12 @@ import org.apache.flink.core.fs.Path;
 import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
-import org.apache.flink.api.common.io.FilePathFilter;
 import org.apache.flink.streaming.api.functions.source.ContinuousFileMonitoringFunction;
 import org.apache.flink.streaming.api.functions.source.ContinuousFileReaderOperator;
-import org.apache.flink.streaming.api.functions.source.TimestampedFileInputSplit;
 import org.apache.flink.streaming.api.functions.source.FileProcessingMode;
+import org.apache.flink.streaming.api.functions.source.TimestampedFileInputSplit;
 import org.apache.flink.streaming.util.StreamingProgramTestBase;
+
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -53,6 +55,9 @@ import java.util.Set;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * IT cases for the {@link ContinuousFileMonitoringFunction} and {@link ContinuousFileReaderOperator}.
+ */
 public class ContinuousFileProcessingITCase extends StreamingProgramTestBase {
 
 	private static final int NO_OF_FILES = 5;
@@ -84,10 +89,10 @@ public class ContinuousFileProcessingITCase extends StreamingProgramTestBase {
 			MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(hdConf);
 			hdfsCluster = builder.build();
 
-			hdfsURI = "hdfs://" + hdfsCluster.getURI().getHost() + ":" + hdfsCluster.getNameNodePort() +"/";
+			hdfsURI = "hdfs://" + hdfsCluster.getURI().getHost() + ":" + hdfsCluster.getNameNodePort() + "/";
 			hdfs = new org.apache.hadoop.fs.Path(hdfsURI).getFileSystem(hdConf);
 
-		} catch(Throwable e) {
+		} catch (Throwable e) {
 			e.printStackTrace();
 			Assert.fail("Test failed " + e.getMessage());
 		}
@@ -241,7 +246,7 @@ public class ContinuousFileProcessingITCase extends StreamingProgramTestBase {
 			}
 
 			if (!content.add(value + "\n")) {
-				Assert.fail("Duplicate line: "+ value);
+				Assert.fail("Duplicate line: " + value);
 				System.exit(0);
 			}
 
@@ -301,7 +306,7 @@ public class ContinuousFileProcessingITCase extends StreamingProgramTestBase {
 		return new Tuple2<>(tmp, str.toString());
 	}
 
-	public static class SuccessException extends Exception {
+	private static class SuccessException extends Exception {
 		private static final long serialVersionUID = -7011865671593955887L;
 	}
 }

http://git-wip-us.apache.org/repos/asf/flink/blob/6445da02/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingTest.java
----------------------------------------------------------------------
diff --git a/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingTest.java b/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingTest.java
index 19358e3..2fc00c4 100644
--- a/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingTest.java
+++ b/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingTest.java
@@ -20,6 +20,7 @@ package org.apache.flink.hdfstests;
 
 import org.apache.flink.api.common.ExecutionConfig;
 import org.apache.flink.api.common.io.FileInputFormat;
+import org.apache.flink.api.common.io.FilePathFilter;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.java.io.TextInputFormat;
 import org.apache.flink.api.java.tuple.Tuple2;
@@ -28,14 +29,13 @@ import org.apache.flink.configuration.ConfigConstants;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.core.fs.FileInputSplit;
 import org.apache.flink.core.fs.Path;
-import org.apache.flink.api.common.io.FilePathFilter;
 import org.apache.flink.core.testutils.OneShotLatch;
 import org.apache.flink.streaming.api.TimeCharacteristic;
 import org.apache.flink.streaming.api.functions.source.ContinuousFileMonitoringFunction;
 import org.apache.flink.streaming.api.functions.source.ContinuousFileReaderOperator;
-import org.apache.flink.streaming.api.functions.source.TimestampedFileInputSplit;
 import org.apache.flink.streaming.api.functions.source.FileProcessingMode;
 import org.apache.flink.streaming.api.functions.source.SourceFunction;
+import org.apache.flink.streaming.api.functions.source.TimestampedFileInputSplit;
 import org.apache.flink.streaming.api.operators.StreamSource;
 import org.apache.flink.streaming.api.watermark.Watermark;
 import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
@@ -43,6 +43,7 @@ import org.apache.flink.streaming.runtime.tasks.OperatorStateHandles;
 import org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness;
 import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness;
 import org.apache.flink.util.Preconditions;
+
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -68,6 +69,9 @@ import java.util.TreeSet;
 import java.util.UUID;
 import java.util.concurrent.ConcurrentLinkedQueue;
 
+/**
+ * Tests for the {@link ContinuousFileMonitoringFunction} and {@link ContinuousFileReaderOperator}.
+ */
 public class ContinuousFileProcessingTest {
 
 	private static final int NO_OF_FILES = 5;
@@ -94,10 +98,10 @@ public class ContinuousFileProcessingTest {
 			MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(hdConf);
 			hdfsCluster = builder.build();
 
-			hdfsURI = "hdfs://" + hdfsCluster.getURI().getHost() + ":" + hdfsCluster.getNameNodePort() +"/";
+			hdfsURI = "hdfs://" + hdfsCluster.getURI().getHost() + ":" + hdfsCluster.getNameNodePort() + "/";
 			hdfs = new org.apache.hadoop.fs.Path(hdfsURI).getFileSystem(hdConf);
 
-		} catch(Throwable e) {
+		} catch (Throwable e) {
 			e.printStackTrace();
 			Assert.fail("Test failed " + e.getMessage());
 		}
@@ -115,7 +119,7 @@ public class ContinuousFileProcessingTest {
 	@Test
 	public void testInvalidPathSpecification() throws Exception {
 
-		String invalidPath = "hdfs://" + hdfsCluster.getURI().getHost() + ":" + hdfsCluster.getNameNodePort() +"/invalid/";
+		String invalidPath = "hdfs://" + hdfsCluster.getURI().getHost() + ":" + hdfsCluster.getNameNodePort() + "/invalid/";
 		TextInputFormat format = new TextInputFormat(new Path(invalidPath));
 
 		ContinuousFileMonitoringFunction<String> monitoringFunction =
@@ -409,7 +413,6 @@ public class ContinuousFileProcessingTest {
 		TimestampedFileInputSplit split4 =
 			new TimestampedFileInputSplit(11, 0, new Path("test/test3"), 0, 100, null);
 
-
 		final OneShotLatch latch = new OneShotLatch();
 
 		BlockingFileInputFormat format = new BlockingFileInputFormat(latch, new Path(testBasePath));
@@ -756,7 +759,6 @@ public class ContinuousFileProcessingTest {
 	public void testFunctionRestore() throws Exception {
 		String testBasePath = hdfsURI + "/" + UUID.randomUUID() + "/";
 
-
 		org.apache.hadoop.fs.Path path = null;
 		long fileModTime = Long.MIN_VALUE;
 		for (int i = 0; i < 1; i++) {
@@ -910,8 +912,7 @@ public class ContinuousFileProcessingTest {
 		private int elementsBeforeNotifying = -1;
 		private int elementsBeforeCanceling = -1;
 
-		FileVerifyingSourceContext(OneShotLatch latch,
-								   ContinuousFileMonitoringFunction src) {
+		FileVerifyingSourceContext(OneShotLatch latch, ContinuousFileMonitoringFunction src) {
 			this(latch, src, -1, -1);
 		}
 
@@ -1041,7 +1042,7 @@ public class ContinuousFileProcessingTest {
 		FSDataOutputStream stream = hdfs.create(tmp);
 		StringBuilder str = new StringBuilder();
 		for (int i = 0; i < LINES_PER_FILE; i++) {
-			String line = fileIdx +": "+ sampleLine + " " + i +"\n";
+			String line = fileIdx + ": " + sampleLine + " " + i + "\n";
 			str.append(line);
 			stream.write(line.getBytes(ConfigConstants.DEFAULT_CHARSET));
 		}

http://git-wip-us.apache.org/repos/asf/flink/blob/6445da02/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/FileStateBackendTest.java
----------------------------------------------------------------------
diff --git a/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/FileStateBackendTest.java b/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/FileStateBackendTest.java
index 7f8eea8..9958729 100644
--- a/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/FileStateBackendTest.java
+++ b/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/FileStateBackendTest.java
@@ -18,7 +18,6 @@
 
 package org.apache.flink.hdfstests;
 
-import org.apache.commons.io.FileUtils;
 import org.apache.flink.api.common.JobID;
 import org.apache.flink.configuration.ConfigConstants;
 import org.apache.flink.core.fs.FileStatus;
@@ -30,6 +29,8 @@ import org.apache.flink.runtime.state.StateBackendTestBase;
 import org.apache.flink.runtime.state.filesystem.FileStateHandle;
 import org.apache.flink.runtime.state.filesystem.FsStateBackend;
 import org.apache.flink.runtime.state.memory.ByteStreamStateHandle;
+
+import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.junit.AfterClass;
@@ -51,15 +52,18 @@ import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+/**
+ * Tests for the {@link FsStateBackend}.
+ */
 public class FileStateBackendTest extends StateBackendTestBase<FsStateBackend> {
 
-	private static File TEMP_DIR;
+	private static File tempDir;
 
-	private static String HDFS_ROOT_URI;
+	private static String hdfsRootUri;
 
-	private static MiniDFSCluster HDFS_CLUSTER;
+	private static MiniDFSCluster hdfsCluster;
 
-	private static FileSystem FS;
+	private static FileSystem fs;
 
 	// ------------------------------------------------------------------------
 	//  startup / shutdown
@@ -68,17 +72,17 @@ public class FileStateBackendTest extends StateBackendTestBase<FsStateBackend> {
 	@BeforeClass
 	public static void createHDFS() {
 		try {
-			TEMP_DIR = new File(ConfigConstants.DEFAULT_TASK_MANAGER_TMP_PATH, UUID.randomUUID().toString());
+			tempDir = new File(ConfigConstants.DEFAULT_TASK_MANAGER_TMP_PATH, UUID.randomUUID().toString());
 
 			Configuration hdConf = new Configuration();
-			hdConf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, TEMP_DIR.getAbsolutePath());
+			hdConf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, tempDir.getAbsolutePath());
 			MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(hdConf);
-			HDFS_CLUSTER = builder.build();
+			hdfsCluster = builder.build();
 
-			HDFS_ROOT_URI = "hdfs://" + HDFS_CLUSTER.getURI().getHost() + ":"
-					+ HDFS_CLUSTER.getNameNodePort() + "/";
+			hdfsRootUri = "hdfs://" + hdfsCluster.getURI().getHost() + ":"
+					+ hdfsCluster.getNameNodePort() + "/";
 
-			FS = FileSystem.get(new URI(HDFS_ROOT_URI));
+			fs = FileSystem.get(new URI(hdfsRootUri));
 		}
 		catch (Exception e) {
 			e.printStackTrace();
@@ -89,15 +93,15 @@ public class FileStateBackendTest extends StateBackendTestBase<FsStateBackend> {
 	@AfterClass
 	public static void destroyHDFS() {
 		try {
-			HDFS_CLUSTER.shutdown();
-			FileUtils.deleteDirectory(TEMP_DIR);
+			hdfsCluster.shutdown();
+			FileUtils.deleteDirectory(tempDir);
 		}
 		catch (Exception ignored) {}
 	}
 
 	@Override
 	protected FsStateBackend getStateBackend() throws Exception {
-		URI stateBaseURI = new URI(HDFS_ROOT_URI + UUID.randomUUID().toString());
+		URI stateBaseURI = new URI(hdfsRootUri + UUID.randomUUID().toString());
 		return new FsStateBackend(stateBaseURI);
 
 	}
@@ -118,7 +122,7 @@ public class FileStateBackendTest extends StateBackendTestBase<FsStateBackend> {
 	@Override
 	@Test
 	public void testReducingStateRestoreWithWrongSerializers() {}
-	
+
 	@Override
 	@Test
 	public void testMapStateRestoreWithWrongSerializers() {}
@@ -131,7 +135,6 @@ public class FileStateBackendTest extends StateBackendTestBase<FsStateBackend> {
 			FsStateBackend backend = CommonTestUtils.createCopySerializable(new FsStateBackend(basePath, 15));
 			JobID jobId = new JobID();
 
-
 			CheckpointStreamFactory streamFactory = backend.createStreamFactory(jobId, "test_op");
 
 			// we know how FsCheckpointStreamFactory is implemented so we know where it
@@ -169,7 +172,7 @@ public class FileStateBackendTest extends StateBackendTestBase<FsStateBackend> {
 			// use with try-with-resources
 			FileStateHandle handle4;
 			try (CheckpointStreamFactory.CheckpointStateOutputStream stream4 =
-						 streamFactory.createCheckpointStateOutputStream(checkpointId, System.currentTimeMillis())) {
+					streamFactory.createCheckpointStateOutputStream(checkpointId, System.currentTimeMillis())) {
 				stream4.write(state4);
 				handle4 = (FileStateHandle) stream4.closeAndGetHandle();
 			}
@@ -213,7 +216,7 @@ public class FileStateBackendTest extends StateBackendTestBase<FsStateBackend> {
 
 	private static void ensureFileDeleted(Path path) {
 		try {
-			assertFalse(FS.exists(path));
+			assertFalse(fs.exists(path));
 		}
 		catch (IOException ignored) {}
 	}
@@ -224,7 +227,7 @@ public class FileStateBackendTest extends StateBackendTestBase<FsStateBackend> {
 
 	private static boolean isDirectoryEmpty(Path directory) {
 		try {
-			FileStatus[] nested = FS.listStatus(directory);
+			FileStatus[] nested = fs.listStatus(directory);
 			return  nested == null || nested.length == 0;
 		}
 		catch (IOException e) {
@@ -233,7 +236,7 @@ public class FileStateBackendTest extends StateBackendTestBase<FsStateBackend> {
 	}
 
 	private static URI randomHdfsFileUri() {
-		String uriString = HDFS_ROOT_URI + UUID.randomUUID().toString();
+		String uriString = hdfsRootUri + UUID.randomUUID().toString();
 		try {
 			return new URI(uriString);
 		}

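The field renames in FileStateBackendTest follow the static-variable naming checks: UPPER_SNAKE_CASE is reserved for static final constants, while mutable static fields must be camelCase. A minimal sketch, assuming checkstyle's default naming patterns:

	public class NamingExample {
		// static final constant: UPPER_SNAKE_CASE is accepted
		private static final int LINES_PER_FILE = 10;

		// mutable static state: the naming check requires camelCase
		private static String hdfsRootUri;
	}
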
http://git-wip-us.apache.org/repos/asf/flink/blob/6445da02/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/FsNegativeRunningJobsRegistryTest.java
----------------------------------------------------------------------
diff --git a/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/FsNegativeRunningJobsRegistryTest.java b/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/FsNegativeRunningJobsRegistryTest.java
index bb27b8b..6076c8f 100644
--- a/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/FsNegativeRunningJobsRegistryTest.java
+++ b/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/FsNegativeRunningJobsRegistryTest.java
@@ -21,11 +21,10 @@ package org.apache.flink.hdfstests;
 import org.apache.flink.api.common.JobID;
 import org.apache.flink.core.fs.Path;
 import org.apache.flink.runtime.highavailability.FsNegativeRunningJobsRegistry;
-
 import org.apache.flink.runtime.highavailability.RunningJobsRegistry.JobSchedulingStatus;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
@@ -44,9 +43,9 @@ public class FsNegativeRunningJobsRegistryTest {
 	@ClassRule
 	public static final TemporaryFolder TEMP_DIR = new TemporaryFolder();
 
-	private static MiniDFSCluster HDFS_CLUSTER;
+	private static MiniDFSCluster hdfsCluster;
 
-	private static Path HDFS_ROOT_PATH;
+	private static Path hdfsRootPath;
 
 	// ------------------------------------------------------------------------
 	//  startup / shutdown
@@ -60,28 +59,28 @@ public class FsNegativeRunningJobsRegistryTest {
 		hdConf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, tempDir.getAbsolutePath());
 
 		MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(hdConf);
-		HDFS_CLUSTER = builder.build();
+		hdfsCluster = builder.build();
 
-		HDFS_ROOT_PATH = new Path("hdfs://" + HDFS_CLUSTER.getURI().getHost() + ":"
-				+ HDFS_CLUSTER.getNameNodePort() + "/");
+		hdfsRootPath = new Path("hdfs://" + hdfsCluster.getURI().getHost() + ":"
+				+ hdfsCluster.getNameNodePort() + "/");
 	}
 
 	@AfterClass
 	public static void destroyHDFS() {
-		if (HDFS_CLUSTER != null) {
-			HDFS_CLUSTER.shutdown();
+		if (hdfsCluster != null) {
+			hdfsCluster.shutdown();
 		}
-		HDFS_CLUSTER = null;
-		HDFS_ROOT_PATH = null;
+		hdfsCluster = null;
+		hdfsRootPath = null;
 	}
 
 	// ------------------------------------------------------------------------
 	//  Tests
 	// ------------------------------------------------------------------------
-	
+
 	@Test
 	public void testCreateAndSetFinished() throws Exception {
-		final Path workDir = new Path(HDFS_ROOT_PATH, "test-work-dir");
+		final Path workDir = new Path(hdfsRootPath, "test-work-dir");
 		final JobID jid = new JobID();
 
 		FsNegativeRunningJobsRegistry registry = new FsNegativeRunningJobsRegistry(workDir);
@@ -106,7 +105,7 @@ public class FsNegativeRunningJobsRegistryTest {
 
 	@Test
 	public void testSetFinishedAndRunning() throws Exception {
-		final Path workDir = new Path(HDFS_ROOT_PATH, "änother_wörk_directörü");
+		final Path workDir = new Path(hdfsRootPath, "änother_wörk_directörü");
 		final JobID jid = new JobID();
 
 		FsNegativeRunningJobsRegistry registry = new FsNegativeRunningJobsRegistry(workDir);

http://git-wip-us.apache.org/repos/asf/flink/blob/6445da02/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/HDFSTest.java
----------------------------------------------------------------------
diff --git a/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/HDFSTest.java b/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/HDFSTest.java
index 0815863..5f778f2 100644
--- a/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/HDFSTest.java
+++ b/flink-fs-tests/src/test/java/org/apache/flink/hdfstests/HDFSTest.java
@@ -18,7 +18,6 @@
 
 package org.apache.flink.hdfstests;
 
-import org.apache.commons.io.IOUtils;
 import org.apache.flink.api.common.io.FileOutputFormat;
 import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.api.java.ExecutionEnvironmentFactory;
@@ -36,6 +35,8 @@ import org.apache.flink.runtime.blob.BlobUtils;
 import org.apache.flink.runtime.fs.hdfs.HadoopFileSystem;
 import org.apache.flink.runtime.jobmanager.HighAvailabilityMode;
 import org.apache.flink.util.FileUtils;
+
+import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
@@ -77,17 +78,17 @@ public class HDFSTest {
 			MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(hdConf);
 			hdfsCluster = builder.build();
 
-			hdfsURI = "hdfs://" + hdfsCluster.getURI().getHost() + ":" + hdfsCluster.getNameNodePort() +"/";
+			hdfsURI = "hdfs://" + hdfsCluster.getURI().getHost() + ":" + hdfsCluster.getNameNodePort() + "/";
 
 			hdPath = new org.apache.hadoop.fs.Path("/test");
 			hdfs = hdPath.getFileSystem(hdConf);
 			FSDataOutputStream stream = hdfs.create(hdPath);
-			for(int i = 0; i < 10; i++) {
+			for (int i = 0; i < 10; i++) {
 				stream.write("Hello HDFS\n".getBytes(ConfigConstants.DEFAULT_CHARSET));
 			}
 			stream.close();
 
-		} catch(Throwable e) {
+		} catch (Throwable e) {
 			e.printStackTrace();
 			Assert.fail("Test failed " + e.getMessage());
 		}
@@ -112,23 +113,23 @@ public class HDFSTest {
 		try {
 			FileSystem fs = file.getFileSystem();
 			assertTrue("Must be HadoopFileSystem", fs instanceof HadoopFileSystem);
-			
+
 			DopOneTestEnvironment.setAsContext();
 			try {
 				WordCount.main(new String[]{
 						"--input", file.toString(),
 						"--output", result.toString()});
 			}
-			catch(Throwable t) {
+			catch (Throwable t) {
 				t.printStackTrace();
 				Assert.fail("Test failed with " + t.getMessage());
 			}
 			finally {
 				DopOneTestEnvironment.unsetAsContext();
 			}
-			
+
 			assertTrue("No result file present", hdfs.exists(result));
-			
+
 			// validate output:
 			org.apache.hadoop.fs.FSDataInputStream inStream = hdfs.open(result);
 			StringWriter writer = new StringWriter();
@@ -141,7 +142,7 @@ public class HDFSTest {
 
 		} catch (IOException e) {
 			e.printStackTrace();
-			Assert.fail("Error in test: " + e.getMessage() );
+			Assert.fail("Error in test: " + e.getMessage());
 		}
 	}
 
@@ -149,7 +150,7 @@ public class HDFSTest {
 	public void testAvroOut() {
 		String type = "one";
 		AvroOutputFormat<String> avroOut =
-				new AvroOutputFormat<String>( String.class );
+				new AvroOutputFormat<String>(String.class);
 
 		org.apache.hadoop.fs.Path result = new org.apache.hadoop.fs.Path(hdfsURI + "/avroTest");
 
@@ -166,11 +167,10 @@ public class HDFSTest {
 			avroOut.writeRecord(type);
 			avroOut.close();
 
-
 			assertTrue("No result file present", hdfs.exists(result));
 			FileStatus[] files = hdfs.listStatus(result);
 			Assert.assertEquals(2, files.length);
-			for(FileStatus file : files) {
+			for (FileStatus file : files) {
 				assertTrue("1.avro".equals(file.getPath().getName()) || "2.avro".equals(file.getPath().getName()));
 			}
 
@@ -249,9 +249,8 @@ public class HDFSTest {
 		}
 	}
 
-	// package visible
-	static abstract class DopOneTestEnvironment extends ExecutionEnvironment {
-		
+	abstract static class DopOneTestEnvironment extends ExecutionEnvironment {
+
 		public static void setAsContext() {
 			final LocalEnvironment le = new LocalEnvironment();
 			le.setParallelism(1);
@@ -264,7 +263,7 @@ public class HDFSTest {
 				}
 			});
 		}
-		
+
 		public static void unsetAsContext() {
 			resetContextEnvironment();
 		}

http://git-wip-us.apache.org/repos/asf/flink/blob/6445da02/flink-fs-tests/src/test/resources/log4j-test.properties
----------------------------------------------------------------------
diff --git a/flink-fs-tests/src/test/resources/log4j-test.properties b/flink-fs-tests/src/test/resources/log4j-test.properties
index f533ba2..e335df5 100644
--- a/flink-fs-tests/src/test/resources/log4j-test.properties
+++ b/flink-fs-tests/src/test/resources/log4j-test.properties
@@ -28,4 +28,4 @@ log4j.appender.testlogger.layout=org.apache.log4j.PatternLayout
 log4j.appender.testlogger.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
 
 # suppress the irrelevant (wrong) warnings from the netty channel handler
-log4j.logger.org.jboss.netty.channel.DefaultChannelPipeline=ERROR, testlogger
\ No newline at end of file
+log4j.logger.org.jboss.netty.channel.DefaultChannelPipeline=ERROR, testlogger


[11/15] flink git commit: [FLINK-6709] [gelly] Activate strict checkstyle for flink-gellies

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/LabelPropagationData.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/LabelPropagationData.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/LabelPropagationData.java
index 8decb24..343ff70 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/LabelPropagationData.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/LabelPropagationData.java
@@ -18,21 +18,21 @@
 
 package org.apache.flink.graph.examples.data;
 
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.graph.Edge;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.types.NullValue;
 
+import java.util.ArrayList;
+import java.util.List;
+
 /**
  * Provides the default data set used for the Label Propagation test program.
  * If no parameters are given to the program, the default edge data set is used.
  */
 public class LabelPropagationData {
-	
+
 	public static final String LABELS_AFTER_1_ITERATION = "1,10\n" +
 			"2,10\n" +
 			"3,10\n" +
@@ -41,7 +41,7 @@ public class LabelPropagationData {
 			"6,40\n" +
 			"7,40\n";
 
-	public static final String LABELS_WITH_TIE ="1,10\n" +
+	public static final String LABELS_WITH_TIE = "1,10\n" +
 			"2,10\n" +
 			"3,10\n" +
 			"4,10\n" +
@@ -56,13 +56,13 @@ public class LabelPropagationData {
 	public static final DataSet<Vertex<Long, Long>> getDefaultVertexSet(ExecutionEnvironment env) {
 
 		List<Vertex<Long, Long>> vertices = new ArrayList<Vertex<Long, Long>>();
-		vertices.add(new Vertex<Long, Long>(1l, 10l));
-		vertices.add(new Vertex<Long, Long>(2l, 10l));
-		vertices.add(new Vertex<Long, Long>(3l, 30l));
-		vertices.add(new Vertex<Long, Long>(4l, 40l));
-		vertices.add(new Vertex<Long, Long>(5l, 40l));
-		vertices.add(new Vertex<Long, Long>(6l, 40l));
-		vertices.add(new Vertex<Long, Long>(7l, 40l));
+		vertices.add(new Vertex<Long, Long>(1L, 10L));
+		vertices.add(new Vertex<Long, Long>(2L, 10L));
+		vertices.add(new Vertex<Long, Long>(3L, 30L));
+		vertices.add(new Vertex<Long, Long>(4L, 40L));
+		vertices.add(new Vertex<Long, Long>(5L, 40L));
+		vertices.add(new Vertex<Long, Long>(6L, 40L));
+		vertices.add(new Vertex<Long, Long>(7L, 40L));
 
 		return env.fromCollection(vertices);
 	}
@@ -83,15 +83,15 @@ public class LabelPropagationData {
 	public static final DataSet<Vertex<Long, Long>> getTieVertexSet(ExecutionEnvironment env) {
 
 		List<Vertex<Long, Long>> vertices = new ArrayList<Vertex<Long, Long>>();
-		vertices.add(new Vertex<Long, Long>(1l, 10l));
-		vertices.add(new Vertex<Long, Long>(2l, 10l));
-		vertices.add(new Vertex<Long, Long>(3l, 10l));
-		vertices.add(new Vertex<Long, Long>(4l, 10l));
-		vertices.add(new Vertex<Long, Long>(5l, 0l));
-		vertices.add(new Vertex<Long, Long>(6l, 20l));
-		vertices.add(new Vertex<Long, Long>(7l, 20l));
-		vertices.add(new Vertex<Long, Long>(8l, 20l));
-		vertices.add(new Vertex<Long, Long>(9l, 20l));
+		vertices.add(new Vertex<Long, Long>(1L, 10L));
+		vertices.add(new Vertex<Long, Long>(2L, 10L));
+		vertices.add(new Vertex<Long, Long>(3L, 10L));
+		vertices.add(new Vertex<Long, Long>(4L, 10L));
+		vertices.add(new Vertex<Long, Long>(5L, 0L));
+		vertices.add(new Vertex<Long, Long>(6L, 20L));
+		vertices.add(new Vertex<Long, Long>(7L, 20L));
+		vertices.add(new Vertex<Long, Long>(8L, 20L));
+		vertices.add(new Vertex<Long, Long>(9L, 20L));
 
 		return env.fromCollection(vertices);
 	}

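The literal changes above come from the upper-ell check: a lowercase 'l' suffix on a long literal is easily misread as the digit 1. For illustration:

	long flagged = 10l;  // violation: 'l' can be confused with '1'
	long accepted = 10L; // compliant
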
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/MusicProfilesData.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/MusicProfilesData.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/MusicProfilesData.java
index e4c98fe..df139f0 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/MusicProfilesData.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/MusicProfilesData.java
@@ -18,13 +18,13 @@
 
 package org.apache.flink.graph.examples.data;
 
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.api.java.tuple.Tuple3;
 
+import java.util.ArrayList;
+import java.util.List;
+
 /**
  * Provides the default data sets used for the Music Profiles example program.
  * If no parameters are given to the program, the default data sets are used.
@@ -33,17 +33,17 @@ public class MusicProfilesData {
 
 	public static DataSet<Tuple3<String, String, Integer>> getUserSongTriplets(ExecutionEnvironment env) {
 		List<Tuple3<String, String, Integer>> triplets = new ArrayList<Tuple3<String, String, Integer>>();
-		
+
 		triplets.add(new Tuple3<String, String, Integer>("user_1", "song_1", 100));
 		triplets.add(new Tuple3<String, String, Integer>("user_1", "song_2", 10));
 		triplets.add(new Tuple3<String, String, Integer>("user_1", "song_3", 20));
 		triplets.add(new Tuple3<String, String, Integer>("user_1", "song_4", 30));
 		triplets.add(new Tuple3<String, String, Integer>("user_1", "song_5", 1));
-		
+
 		triplets.add(new Tuple3<String, String, Integer>("user_2", "song_6", 40));
 		triplets.add(new Tuple3<String, String, Integer>("user_2", "song_7", 10));
 		triplets.add(new Tuple3<String, String, Integer>("user_2", "song_8", 3));
-		
+
 		triplets.add(new Tuple3<String, String, Integer>("user_3", "song_1", 100));
 		triplets.add(new Tuple3<String, String, Integer>("user_3", "song_2", 10));
 		triplets.add(new Tuple3<String, String, Integer>("user_3", "song_3", 20));
@@ -54,14 +54,14 @@ public class MusicProfilesData {
 		triplets.add(new Tuple3<String, String, Integer>("user_3", "song_12", 30));
 		triplets.add(new Tuple3<String, String, Integer>("user_3", "song_13", 34));
 		triplets.add(new Tuple3<String, String, Integer>("user_3", "song_14", 17));
-		
+
 		triplets.add(new Tuple3<String, String, Integer>("user_4", "song_1", 100));
 		triplets.add(new Tuple3<String, String, Integer>("user_4", "song_6", 10));
 		triplets.add(new Tuple3<String, String, Integer>("user_4", "song_8", 20));
 		triplets.add(new Tuple3<String, String, Integer>("user_4", "song_12", 30));
 		triplets.add(new Tuple3<String, String, Integer>("user_4", "song_13", 1));
 		triplets.add(new Tuple3<String, String, Integer>("user_4", "song_15", 1));
-		
+
 		triplets.add(new Tuple3<String, String, Integer>("user_5", "song_3", 300));
 		triplets.add(new Tuple3<String, String, Integer>("user_5", "song_4", 4));
 		triplets.add(new Tuple3<String, String, Integer>("user_5", "song_5", 5));
@@ -76,7 +76,7 @@ public class MusicProfilesData {
 
 		return env.fromCollection(triplets);
 	}
-	
+
 	public static DataSet<String> getMismatches(ExecutionEnvironment env) {
 		List<String> errors = new ArrayList<String>();
 		errors.add("ERROR: <song_8 track_8> Sever");

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/PageRankData.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/PageRankData.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/PageRankData.java
index a45de88..1c3ebb0 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/PageRankData.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/PageRankData.java
@@ -18,21 +18,21 @@
 
 package org.apache.flink.graph.examples.data;
 
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.graph.Edge;
 
+import java.util.ArrayList;
+import java.util.List;
+
 /**
  * Provides the default data set used for the PageRank test program.
  * If no parameters are given to the program, the default edge data set is used.
  */
 public class PageRankData {
-	
+
 	public static final String EDGES = "2	1\n" +
-										"5	2\n" + 
+										"5	2\n" +
 										"5	4\n" +
 										"4	3\n" +
 										"4	2\n" +
@@ -41,9 +41,9 @@ public class PageRankData {
 										"1	3\n" +
 										"3	5\n";
 
-	
+
 	public static final String RANKS_AFTER_3_ITERATIONS = "1,0.237\n" +
-														"2,0.248\n" + 
+														"2,0.248\n" +
 														"3,0.173\n" +
 														"4,0.175\n" +
 														"5,0.165\n";

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/SingleSourceShortestPathsData.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/SingleSourceShortestPathsData.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/SingleSourceShortestPathsData.java
index 59d14ad..5824a98 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/SingleSourceShortestPathsData.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/SingleSourceShortestPathsData.java
@@ -18,13 +18,13 @@
 
 package org.apache.flink.graph.examples.data;
 
-import java.util.LinkedList;
-import java.util.List;
-
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.graph.Edge;
 
+import java.util.LinkedList;
+import java.util.List;
+
 /**
  * Provides the default data set used for the Single Source Shortest Paths example program.
  * If no parameters are given to the program, the default edge data set is used.
@@ -46,7 +46,7 @@ public class SingleSourceShortestPathsData {
 		new Object[]{5L, 1L, 51.0}
 	};
 
-	public static final String RESULTED_SINGLE_SOURCE_SHORTEST_PATHS =  "1,0.0\n" + "2,12.0\n" + "3,13.0\n" + 
+	public static final String RESULTED_SINGLE_SOURCE_SHORTEST_PATHS =  "1,0.0\n" + "2,12.0\n" + "3,13.0\n" +
 								"4,47.0\n" + "5,48.0";
 
 	public static DataSet<Edge<Long, Double>> getDefaultEdgeDataSet(ExecutionEnvironment env) {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/SummarizationData.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/SummarizationData.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/SummarizationData.java
index 703b66e..dafedec 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/SummarizationData.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/SummarizationData.java
@@ -37,9 +37,9 @@ public class SummarizationData {
 	/**
 	 * Vertices of the input graph.
 	 *
-	 * Format:
+	 * <p>Format:
 	 *
-	 * "vertex-id;vertex-value"
+	 * <p>"vertex-id;vertex-value"
 	 */
 	private static final String[] INPUT_VERTICES = new String[] {
 		"0;1",
@@ -53,9 +53,9 @@ public class SummarizationData {
 	/**
 	 * Edges of the input graph.
 	 *
-	 * Format:
+	 * <p>Format:
 	 *
-	 * "source-id;target-id;edge-value
+	 * <p>"source-id;target-id;edge-value
 	 */
 	private static final String[] INPUT_EDGES = new String[] {
 		"0;1;1",
@@ -73,9 +73,9 @@ public class SummarizationData {
 	/**
 	 * The resulting vertex id can be any id of the vertices summarized by the single vertex.
 	 *
-	 * Format:
+	 * <p>Format:
 	 *
-	 * "possible-id[,possible-id];group-value,group-count"
+	 * <p>"possible-id[,possible-id];group-value,group-count"
 	 */
 	public static final String[] EXPECTED_VERTICES = new String[] {
 			"0,1;1,2",
@@ -84,9 +84,11 @@ public class SummarizationData {
 	};
 
 	/**
-	 * Format:
+	 * The expected output from the input edges.
 	 *
-	 * "possible-source-id[,possible-source-id];possible-target-id[,possible-target-id];group-value,group-count"
+	 * <p>Format:
+	 *
+	 * <p>"possible-source-id[,possible-source-id];possible-target-id[,possible-target-id];group-value,group-count"
 	 */
 	public static final String[] EXPECTED_EDGES_WITH_VALUES = new String[] {
 			"0,1;0,1;1,2",
@@ -98,9 +100,11 @@ public class SummarizationData {
 	};
 
 	/**
-	 * Format:
+	 * The expected output from the input edges translated to null values.
+	 *
+	 * <p>Format:
 	 *
-	 * "possible-source-id[,possible-source-id];possible-target-id[,possible-target-id];group-value,group-count"
+	 * <p>"possible-source-id[,possible-source-id];possible-target-id[,possible-target-id];group-value,group-count"
 	 */
 	public static final String[] EXPECTED_EDGES_ABSENT_VALUES = new String[] {
 			"0,1;0,1;(null),2",

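The Javadoc edits in this file satisfy the Javadoc paragraph check: every paragraph after the first must be separated by an empty comment line and open with a <p> tag. For illustration:

	/**
	 * Vertices of the input graph.
	 *
	 * <p>Format: "vertex-id;vertex-value"
	 */
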
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/TriangleCountData.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/TriangleCountData.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/TriangleCountData.java
index cf3a715..7f4f926 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/TriangleCountData.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/TriangleCountData.java
@@ -33,7 +33,15 @@ import java.util.List;
  */
 public class TriangleCountData {
 
-	public static final String EDGES = "1	2\n"+"1	3\n"+"2	3\n"+"2	6\n"+"3	4\n"+"3	5\n"+"3	6\n"+"4	5\n"+"6	7\n";
+	public static final String EDGES = "1	2\n" +
+		"1	3\n" +
+		"2	3\n" +
+		"2	6\n" +
+		"3	4\n" +
+		"3	5\n" +
+		"3	6\n" +
+		"4	5\n" +
+		"6	7\n";
 
 	public static DataSet<Edge<Long, NullValue>> getDefaultEdgeDataSet(ExecutionEnvironment env) {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/resources/logback.xml
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/resources/logback.xml b/flink-libraries/flink-gelly-examples/src/main/resources/logback.xml
index 95f2d04..4b87118 100644
--- a/flink-libraries/flink-gelly-examples/src/main/resources/logback.xml
+++ b/flink-libraries/flink-gelly-examples/src/main/resources/logback.xml
@@ -26,4 +26,4 @@
     <root level="INFO">
         <appender-ref ref="STDOUT"/>
     </root>
-</configuration>
\ No newline at end of file
+</configuration>

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/scala/org/apache/flink/graph/scala/examples/ConnectedComponents.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/scala/org/apache/flink/graph/scala/examples/ConnectedComponents.scala b/flink-libraries/flink-gelly-examples/src/main/scala/org/apache/flink/graph/scala/examples/ConnectedComponents.scala
index b49a520..d3f2f92 100644
--- a/flink-libraries/flink-gelly-examples/src/main/scala/org/apache/flink/graph/scala/examples/ConnectedComponents.scala
+++ b/flink-libraries/flink-gelly-examples/src/main/scala/org/apache/flink/graph/scala/examples/ConnectedComponents.scala
@@ -18,14 +18,15 @@
 
 package org.apache.flink.graph.scala.examples
 
+import java.lang.Long
+
+import org.apache.flink.api.common.functions.MapFunction
 import org.apache.flink.api.scala._
-import org.apache.flink.graph.library.GSAConnectedComponents
-import org.apache.flink.graph.scala._
 import org.apache.flink.graph.Edge
 import org.apache.flink.graph.examples.data.ConnectedComponentsDefaultData
+import org.apache.flink.graph.library.GSAConnectedComponents
+import org.apache.flink.graph.scala._
 import org.apache.flink.types.NullValue
-import org.apache.flink.api.common.functions.MapFunction
-import java.lang.Long
 
 /**
  * This example shows how to use Gelly's library methods.

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/scala/org/apache/flink/graph/scala/examples/GSASingleSourceShortestPaths.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/scala/org/apache/flink/graph/scala/examples/GSASingleSourceShortestPaths.scala b/flink-libraries/flink-gelly-examples/src/main/scala/org/apache/flink/graph/scala/examples/GSASingleSourceShortestPaths.scala
index 0a10ad7..bd5c63b 100644
--- a/flink-libraries/flink-gelly-examples/src/main/scala/org/apache/flink/graph/scala/examples/GSASingleSourceShortestPaths.scala
+++ b/flink-libraries/flink-gelly-examples/src/main/scala/org/apache/flink/graph/scala/examples/GSASingleSourceShortestPaths.scala
@@ -21,10 +21,10 @@ package org.apache.flink.graph.scala.examples
 import org.apache.flink.api.common.functions.MapFunction
 import org.apache.flink.api.scala._
 import org.apache.flink.graph.Edge
+import org.apache.flink.graph.examples.data.SingleSourceShortestPathsData
 import org.apache.flink.graph.gsa.{ApplyFunction, GatherFunction, Neighbor, SumFunction}
 import org.apache.flink.graph.scala._
 import org.apache.flink.graph.scala.utils.Tuple3ToEdgeMap
-import org.apache.flink.graph.examples.data.SingleSourceShortestPathsData
 
 /**
  * This example shows how to use Gelly's gather-sum-apply iterations.

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/scala/org/apache/flink/graph/scala/examples/SingleSourceShortestPaths.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/scala/org/apache/flink/graph/scala/examples/SingleSourceShortestPaths.scala b/flink-libraries/flink-gelly-examples/src/main/scala/org/apache/flink/graph/scala/examples/SingleSourceShortestPaths.scala
index 2d623e7..f3b10f7 100644
--- a/flink-libraries/flink-gelly-examples/src/main/scala/org/apache/flink/graph/scala/examples/SingleSourceShortestPaths.scala
+++ b/flink-libraries/flink-gelly-examples/src/main/scala/org/apache/flink/graph/scala/examples/SingleSourceShortestPaths.scala
@@ -18,16 +18,15 @@
 
 package org.apache.flink.graph.scala.examples
 
-import org.apache.flink.api.scala._
-import org.apache.flink.graph.scala._
-import org.apache.flink.graph.Edge
 import org.apache.flink.api.common.functions.MapFunction
-import org.apache.flink.graph.spargel.{MessageIterator, ScatterFunction, GatherFunction}
-import org.apache.flink.graph.Vertex
+import org.apache.flink.api.scala._
 import org.apache.flink.graph.examples.data.SingleSourceShortestPathsData
+import org.apache.flink.graph.scala._
+import org.apache.flink.graph.scala.utils.Tuple3ToEdgeMap
+import org.apache.flink.graph.spargel.{GatherFunction, MessageIterator, ScatterFunction}
+import org.apache.flink.graph.{Edge, Vertex}
 
 import scala.collection.JavaConversions._
-import org.apache.flink.graph.scala.utils.Tuple3ToEdgeMap
 
 /**
  * This example shows how to use Gelly's scatter-gather iterations.

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/RunnerITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/RunnerITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/RunnerITCase.java
index f93dc31..8d17c39 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/RunnerITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/RunnerITCase.java
@@ -20,12 +20,16 @@ package org.apache.flink.graph;
 
 import org.apache.flink.client.program.ProgramParametrizationException;
 import org.apache.flink.graph.drivers.DriverBaseITCase;
+
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
+/**
+ * Tests for {@link Runner}.
+ */
 @RunWith(Parameterized.class)
 public class RunnerITCase
 extends DriverBaseITCase {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/AdamicAdarITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/AdamicAdarITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/AdamicAdarITCase.java
index 2548263..c36dcb5 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/AdamicAdarITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/AdamicAdarITCase.java
@@ -18,13 +18,17 @@
 
 package org.apache.flink.graph.drivers;
 
-import org.apache.commons.lang3.ArrayUtils;
 import org.apache.flink.client.program.ProgramParametrizationException;
+
+import org.apache.commons.lang3.ArrayUtils;
 import org.junit.Assume;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
+/**
+ * Tests for {@link AdamicAdar}.
+ */
 @RunWith(Parameterized.class)
 public class AdamicAdarITCase
 extends CopyableValueDriverBaseITCase {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/ClusteringCoefficientITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/ClusteringCoefficientITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/ClusteringCoefficientITCase.java
index 86eee01..ce8cb3b 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/ClusteringCoefficientITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/ClusteringCoefficientITCase.java
@@ -20,11 +20,15 @@ package org.apache.flink.graph.drivers;
 
 import org.apache.flink.client.program.ProgramParametrizationException;
 import org.apache.flink.graph.asm.dataset.ChecksumHashCode.Checksum;
+
 import org.junit.Assume;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
+/**
+ * Tests for {@link ClusteringCoefficient}.
+ */
 @RunWith(Parameterized.class)
 public class ClusteringCoefficientITCase
 extends CopyableValueDriverBaseITCase {
@@ -83,25 +87,25 @@ extends CopyableValueDriverBaseITCase {
 
 	@Test
 	public void testHashWithSmallUndirectedRMatGraph() throws Exception {
-		long directed_checksum;
-		long undirected_checksum;
+		long directedChecksum;
+		long undirectedChecksum;
 		switch (idType) {
 			case "byte":
 			case "short":
 			case "char":
 			case "integer":
-				directed_checksum = 0x0000003875b38c43L;
-				undirected_checksum = 0x0000003c20344c75L;
+				directedChecksum = 0x0000003875b38c43L;
+				undirectedChecksum = 0x0000003c20344c75L;
 				break;
 
 			case "long":
-				directed_checksum = 0x0000003671970c59L;
-				undirected_checksum = 0x0000003939645d8cL;
+				directedChecksum = 0x0000003671970c59L;
+				undirectedChecksum = 0x0000003939645d8cL;
 				break;
 
 			case "string":
-				directed_checksum = 0x0000003be109a770L;
-				undirected_checksum = 0x0000003b8c98d14aL;
+				directedChecksum = 0x0000003be109a770L;
+				undirectedChecksum = 0x0000003b8c98d14aL;
 				break;
 
 			default:
@@ -113,9 +117,9 @@ extends CopyableValueDriverBaseITCase {
 			"vertex count: 117, average clustering coefficient: 0.57438679[0-9]+\n";
 
 		expectedOutput(parameters(7, "directed", "undirected", "hash"),
-			"\n" + new Checksum(117, directed_checksum) + expected);
+			"\n" + new Checksum(117, directedChecksum) + expected);
 		expectedOutput(parameters(7, "undirected", "undirected", "hash"),
-			"\n" + new Checksum(117, undirected_checksum) + expected);
+			"\n" + new Checksum(117, undirectedChecksum) + expected);
 	}
 
 	@Test
@@ -159,8 +163,8 @@ extends CopyableValueDriverBaseITCase {
 		// computation is too large for collection mode
 		Assume.assumeFalse(mode == TestExecutionMode.COLLECTION);
 
-		long directed_checksum;
-		long undirected_checksum;
+		long directedChecksum;
+		long undirectedChecksum;
 		switch (idType) {
 			case "byte":
 				return;
@@ -168,18 +172,18 @@ extends CopyableValueDriverBaseITCase {
 			case "short":
 			case "char":
 			case "integer":
-				directed_checksum = 0x00000681fad1587eL;
-				undirected_checksum = 0x0000068713b3b7f1L;
+				directedChecksum = 0x00000681fad1587eL;
+				undirectedChecksum = 0x0000068713b3b7f1L;
 				break;
 
 			case "long":
-				directed_checksum = 0x000006928a6301b1L;
-				undirected_checksum = 0x000006a399edf0e6L;
+				directedChecksum = 0x000006928a6301b1L;
+				undirectedChecksum = 0x000006a399edf0e6L;
 				break;
 
 			case "string":
-				directed_checksum = 0x000006749670a2f7L;
-				undirected_checksum = 0x0000067f19c6c4d5L;
+				directedChecksum = 0x000006749670a2f7L;
+				undirectedChecksum = 0x0000067f19c6c4d5L;
 				break;
 
 			default:
@@ -191,8 +195,8 @@ extends CopyableValueDriverBaseITCase {
 			"vertex count: 3349, average clustering coefficient: 0.33029442[0-9]+\n";
 
 		expectedOutput(parameters(12, "directed", "undirected", "hash"),
-			"\n" + new Checksum(3349, directed_checksum) + expected);
+			"\n" + new Checksum(3349, directedChecksum) + expected);
 		expectedOutput(parameters(12, "undirected", "undirected", "hash"),
-			"\n" + new Checksum(3349, undirected_checksum) + expected);
+			"\n" + new Checksum(3349, undirectedChecksum) + expected);
 	}
 }

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/ConnectedComponentsITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/ConnectedComponentsITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/ConnectedComponentsITCase.java
index 95f0c66..cbb5c9d 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/ConnectedComponentsITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/ConnectedComponentsITCase.java
@@ -20,11 +20,15 @@ package org.apache.flink.graph.drivers;
 
 import org.apache.flink.client.program.ProgramParametrizationException;
 import org.apache.flink.graph.asm.dataset.ChecksumHashCode.Checksum;
+
 import org.junit.Assume;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
+/**
+ * Tests for {@link ConnectedComponents}.
+ */
 @RunWith(Parameterized.class)
 public class ConnectedComponentsITCase
 extends DriverBaseITCase {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/DriverBaseITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/DriverBaseITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/DriverBaseITCase.java
index 5b0e42e..670968c 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/DriverBaseITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/DriverBaseITCase.java
@@ -18,11 +18,12 @@
 
 package org.apache.flink.graph.drivers;
 
-import org.apache.commons.lang.ArrayUtils;
 import org.apache.flink.graph.Runner;
 import org.apache.flink.graph.asm.dataset.ChecksumHashCode.Checksum;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
 import org.apache.flink.util.FlinkRuntimeException;
+
+import org.apache.commons.lang3.ArrayUtils;
 import org.hamcrest.Description;
 import org.hamcrest.TypeSafeMatcher;
 import org.junit.Assert;
@@ -38,7 +39,8 @@ import java.util.List;
 import java.util.regex.Pattern;
 
 /**
- *
+ * Base class for driver integration tests providing utility methods for
+ * verifying program output.
  */
 public abstract class DriverBaseITCase
 extends MultipleProgramsTestBase {
@@ -187,7 +189,7 @@ extends MultipleProgramsTestBase {
 		switch (mode) {
 			case CLUSTER:
 			case COLLECTION:
-				args = (String[])ArrayUtils.add(args, "--__disable_object_reuse");
+				args = ArrayUtils.add(args, "--__disable_object_reuse");
 				break;
 
 			case CLUSTER_OBJECT_REUSE:
@@ -214,7 +216,7 @@ extends MultipleProgramsTestBase {
 	 * Implements a Hamcrest regex matcher. Hamcrest 2.0 provides
 	 * Matchers.matchesPattern(String) but Flink depends on Hamcrest 1.3.
 	 *
-	 * see http://stackoverflow.com/a/25021229
+	 * <p>see http://stackoverflow.com/a/25021229
 	 */
 	private static class RegexMatcher
 	extends TypeSafeMatcher<String> {
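
Two details in this file are worth spelling out. The cast dropped from ArrayUtils.add works because the import switched from commons-lang 2.x, whose add(Object[], Object) returns Object[], to commons-lang3, whose generic signature <T> T[] add(T[] array, T element) preserves the String[] type. And the RegexMatcher the javadoc describes could look roughly like the following sketch against the Hamcrest 1.3 API (an illustrative reconstruction, not the class's actual body):

import org.hamcrest.Description;
import org.hamcrest.TypeSafeMatcher;

import java.util.regex.Pattern;

class RegexMatcher extends TypeSafeMatcher<String> {

	private final Pattern pattern;

	private RegexMatcher(String regex) {
		this.pattern = Pattern.compile(regex);
	}

	// factory standing in for Hamcrest 2.0's Matchers.matchesPattern(String)
	static RegexMatcher matchesRegex(String regex) {
		return new RegexMatcher(regex);
	}

	@Override
	protected boolean matchesSafely(String item) {
		return pattern.matcher(item).matches();
	}

	@Override
	public void describeTo(Description description) {
		description.appendText("matches regex: " + pattern.pattern());
	}
}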

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/EdgeListITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/EdgeListITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/EdgeListITCase.java
index d3ba4fb..15f7293 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/EdgeListITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/EdgeListITCase.java
@@ -18,14 +18,18 @@
 
 package org.apache.flink.graph.drivers;
 
-import org.apache.commons.lang3.ArrayUtils;
 import org.apache.flink.client.program.ProgramParametrizationException;
 import org.apache.flink.graph.asm.dataset.ChecksumHashCode.Checksum;
+
+import org.apache.commons.lang3.ArrayUtils;
 import org.junit.Assume;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
+/**
+ * Tests for {@link EdgeList}.
+ */
 @RunWith(Parameterized.class)
 public class EdgeListITCase
 extends DriverBaseITCase {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/GraphMetricsITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/GraphMetricsITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/GraphMetricsITCase.java
index 8c5ed86..1045a38 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/GraphMetricsITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/GraphMetricsITCase.java
@@ -19,11 +19,15 @@
 package org.apache.flink.graph.drivers;
 
 import org.apache.flink.client.program.ProgramParametrizationException;
+
 import org.junit.Assume;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
+/**
+ * Tests for {@link GraphMetrics}.
+ */
 @RunWith(Parameterized.class)
 public class GraphMetricsITCase
 extends DriverBaseITCase {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/HITSITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/HITSITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/HITSITCase.java
index 282d3d5..51a63eb 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/HITSITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/HITSITCase.java
@@ -19,11 +19,15 @@
 package org.apache.flink.graph.drivers;
 
 import org.apache.flink.client.program.ProgramParametrizationException;
+
 import org.junit.Assume;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
+/**
+ * Tests for {@link HITS}.
+ */
 @RunWith(Parameterized.class)
 public class HITSITCase
 extends DriverBaseITCase {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/JaccardIndexITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/JaccardIndexITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/JaccardIndexITCase.java
index 0391771..a8b0111 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/JaccardIndexITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/JaccardIndexITCase.java
@@ -18,14 +18,18 @@
 
 package org.apache.flink.graph.drivers;
 
-import org.apache.commons.lang3.ArrayUtils;
 import org.apache.flink.client.program.ProgramParametrizationException;
 import org.apache.flink.graph.asm.dataset.ChecksumHashCode.Checksum;
+
+import org.apache.commons.lang3.ArrayUtils;
 import org.junit.Assume;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
+/**
+ * Tests for {@link JaccardIndex}.
+ */
 @RunWith(Parameterized.class)
 public class JaccardIndexITCase
 extends CopyableValueDriverBaseITCase {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/PageRankITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/PageRankITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/PageRankITCase.java
index 4ca0a85..8e86810 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/PageRankITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/PageRankITCase.java
@@ -19,11 +19,15 @@
 package org.apache.flink.graph.drivers;
 
 import org.apache.flink.client.program.ProgramParametrizationException;
+
 import org.junit.Assume;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
+/**
+ * Tests for {@link PageRank}.
+ */
 @RunWith(Parameterized.class)
 public class PageRankITCase
 extends DriverBaseITCase {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/TriangleListingITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/TriangleListingITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/TriangleListingITCase.java
index fabdae1..1e330dd 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/TriangleListingITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/TriangleListingITCase.java
@@ -18,14 +18,18 @@
 
 package org.apache.flink.graph.drivers;
 
-import org.apache.commons.lang3.ArrayUtils;
 import org.apache.flink.client.program.ProgramParametrizationException;
 import org.apache.flink.graph.asm.dataset.ChecksumHashCode.Checksum;
+
+import org.apache.commons.lang3.ArrayUtils;
 import org.junit.Assume;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
+/**
+ * Tests for {@link TriangleListing}.
+ */
 @RunWith(Parameterized.class)
 public class TriangleListingITCase
 extends CopyableValueDriverBaseITCase {
@@ -227,7 +231,6 @@ extends CopyableValueDriverBaseITCase {
 		expectedOutput(parameters(12, "undirected", "hash"), expected);
 	}
 
-
 	@Test
 	public void testPrintWithSmallDirectedRMatGraph() throws Exception {
 		// skip 'char' since it is not printed as a number
@@ -253,7 +256,6 @@ extends CopyableValueDriverBaseITCase {
 		expectedOutputChecksum(parameters(7, "directed", "print"), new Checksum(3822, checksum));
 	}
 
-
 	@Test
 	public void testPrintWithSmallUndirectedRMatGraph() throws Exception {
 		// skip 'char' since it is not printed as a number

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/input/GeneratedGraphTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/input/GeneratedGraphTest.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/input/GeneratedGraphTest.java
index da77b0d..1608f95 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/input/GeneratedGraphTest.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/input/GeneratedGraphTest.java
@@ -21,21 +21,25 @@ package org.apache.flink.graph.drivers.input;
 import org.apache.flink.graph.asm.translate.TranslateFunction;
 import org.apache.flink.graph.drivers.input.GeneratedGraph.LongValueToChar;
 import org.apache.flink.graph.drivers.input.GeneratedGraph.LongValueToCharValue;
+import org.apache.flink.graph.drivers.input.GeneratedGraph.LongValueToLong;
 import org.apache.flink.graph.drivers.input.GeneratedGraph.LongValueToString;
 import org.apache.flink.graph.drivers.input.GeneratedGraph.LongValueToUnsignedByte;
 import org.apache.flink.graph.drivers.input.GeneratedGraph.LongValueToUnsignedByteValue;
 import org.apache.flink.graph.drivers.input.GeneratedGraph.LongValueToUnsignedInt;
-import org.apache.flink.graph.drivers.input.GeneratedGraph.LongValueToLong;
 import org.apache.flink.graph.drivers.input.GeneratedGraph.LongValueToUnsignedShort;
 import org.apache.flink.graph.drivers.input.GeneratedGraph.LongValueToUnsignedShortValue;
 import org.apache.flink.types.ByteValue;
 import org.apache.flink.types.CharValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.ShortValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link GeneratedGraph}.
+ */
 public class GeneratedGraphTest {
 
 	private TranslateFunction<LongValue, ByteValue> byteValueTranslator = new LongValueToUnsignedByteValue();
@@ -61,12 +65,12 @@ public class GeneratedGraphTest {
 		assertEquals(new ByteValue((byte) -1), byteValueTranslator.translate(new LongValue((1L << 8) - 1), byteValue));
 	}
 
-	@Test(expected=IllegalArgumentException.class)
+	@Test(expected = IllegalArgumentException.class)
 	public void testByteValueTranslationUpperOutOfRange() throws Exception {
 		byteValueTranslator.translate(new LongValue(1L << 8), byteValue);
 	}
 
-	@Test(expected=IllegalArgumentException.class)
+	@Test(expected = IllegalArgumentException.class)
 	public void testByteValueTranslationLowerOutOfRange() throws Exception {
 		byteValueTranslator.translate(new LongValue(-1), byteValue);
 	}
@@ -80,12 +84,12 @@ public class GeneratedGraphTest {
 		assertEquals(Byte.valueOf((byte) -1), byteTranslator.translate(new LongValue((1L << 8) - 1), null));
 	}
 
-	@Test(expected=IllegalArgumentException.class)
+	@Test(expected = IllegalArgumentException.class)
 	public void testByteTranslationUpperOutOfRange() throws Exception {
 		byteTranslator.translate(new LongValue(1L << 8), null);
 	}
 
-	@Test(expected=IllegalArgumentException.class)
+	@Test(expected = IllegalArgumentException.class)
 	public void testByteTranslationLowerOutOfRange() throws Exception {
 		byteTranslator.translate(new LongValue(-1), null);
 	}
@@ -99,12 +103,12 @@ public class GeneratedGraphTest {
 		assertEquals(new ShortValue((short) -1), shortValueTranslator.translate(new LongValue((1L << 16) - 1), shortValue));
 	}
 
-	@Test(expected=IllegalArgumentException.class)
+	@Test(expected = IllegalArgumentException.class)
 	public void testShortValueTranslationUpperOutOfRange() throws Exception {
 		shortValueTranslator.translate(new LongValue(1L << 16), shortValue);
 	}
 
-	@Test(expected=IllegalArgumentException.class)
+	@Test(expected = IllegalArgumentException.class)
 	public void testShortValueTranslationLowerOutOfRange() throws Exception {
 		shortValueTranslator.translate(new LongValue(-1), shortValue);
 	}
@@ -118,12 +122,12 @@ public class GeneratedGraphTest {
 		assertEquals(Short.valueOf((short) -1), shortTranslator.translate(new LongValue((1L << 16) - 1), null));
 	}
 
-	@Test(expected=IllegalArgumentException.class)
+	@Test(expected = IllegalArgumentException.class)
 	public void testShortTranslationUpperOutOfRange() throws Exception {
 		shortTranslator.translate(new LongValue(1L << 16), null);
 	}
 
-	@Test(expected=IllegalArgumentException.class)
+	@Test(expected = IllegalArgumentException.class)
 	public void testShortTranslationLowerOutOfRange() throws Exception {
 		shortTranslator.translate(new LongValue(-1), null);
 	}
@@ -136,12 +140,12 @@ public class GeneratedGraphTest {
 		assertEquals(new CharValue(Character.MAX_VALUE), charValueTranslator.translate(new LongValue((long) Character.MAX_VALUE), charValue));
 	}
 
-	@Test(expected=IllegalArgumentException.class)
+	@Test(expected = IllegalArgumentException.class)
 	public void testCharValueTranslationUpperOutOfRange() throws Exception {
 		charValueTranslator.translate(new LongValue(1L << 16), charValue);
 	}
 
-	@Test(expected=IllegalArgumentException.class)
+	@Test(expected = IllegalArgumentException.class)
 	public void testCharValueTranslationLowerOutOfRange() throws Exception {
 		charValueTranslator.translate(new LongValue(-1), charValue);
 	}
@@ -154,12 +158,12 @@ public class GeneratedGraphTest {
 		assertEquals(Character.valueOf(Character.MAX_VALUE), charTranslator.translate(new LongValue((long) Character.MAX_VALUE), null));
 	}
 
-	@Test(expected=IllegalArgumentException.class)
+	@Test(expected = IllegalArgumentException.class)
 	public void testCharacterTranslationUpperOutOfRange() throws Exception {
 		charTranslator.translate(new LongValue(1L << 16), null);
 	}
 
-	@Test(expected=IllegalArgumentException.class)
+	@Test(expected = IllegalArgumentException.class)
 	public void testCharacterTranslationLowerOutOfRange() throws Exception {
 		charTranslator.translate(new LongValue(-1), null);
 	}
@@ -173,12 +177,12 @@ public class GeneratedGraphTest {
 		assertEquals(Integer.valueOf(-1), intTranslator.translate(new LongValue((1L << 32) - 1), null));
 	}
 
-	@Test(expected=IllegalArgumentException.class)
+	@Test(expected = IllegalArgumentException.class)
 	public void testIntegerTranslationUpperOutOfRange() throws Exception {
 		intTranslator.translate(new LongValue(1L << 32), null);
 	}
 
-	@Test(expected=IllegalArgumentException.class)
+	@Test(expected = IllegalArgumentException.class)
 	public void testIntegerTranslationLowerOutOfRange() throws Exception {
 		intTranslator.translate(new LongValue(-1), null);
 	}
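
The out-of-range tests above pin down the translators' contract: values in [0, 2^8) map onto Java's signed byte (so 255 comes back as (byte) -1), and anything outside that window throws IllegalArgumentException. A standalone sketch of that check (a hypothetical helper, not the GeneratedGraph API itself):

public final class UnsignedByteCheck {

	static byte toUnsignedByte(long value) {
		if (value < 0 || value >= (1L << 8)) {
			throw new IllegalArgumentException("out of range for unsigned byte: " + value);
		}
		// 0..255 is stored in a signed byte, so 255 wraps to -1,
		// matching assertEquals(new ByteValue((byte) -1), ...) above
		return (byte) value;
	}

	public static void main(String[] args) {
		System.out.println(toUnsignedByte(255)); // prints -1
		try {
			toUnsignedByte(1L << 8);
		} catch (IllegalArgumentException expected) {
			System.out.println("rejected " + (1L << 8)); // like the UpperOutOfRange tests
		}
	}
}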

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/BooleanParameterTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/BooleanParameterTest.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/BooleanParameterTest.java
index 43bba88..8c4bdcb 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/BooleanParameterTest.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/BooleanParameterTest.java
@@ -19,10 +19,14 @@
 package org.apache.flink.graph.drivers.parameter;
 
 import org.apache.flink.api.java.utils.ParameterTool;
+
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+/**
+ * Tests for {@link BooleanParameter}.
+ */
 public class BooleanParameterTest
 extends ParameterTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/ChoiceParameterTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/ChoiceParameterTest.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/ChoiceParameterTest.java
index 1ed1af3..73800fb 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/ChoiceParameterTest.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/ChoiceParameterTest.java
@@ -20,12 +20,16 @@ package org.apache.flink.graph.drivers.parameter;
 
 import org.apache.flink.api.java.utils.ParameterTool;
 import org.apache.flink.client.program.ProgramParametrizationException;
+
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
 
+/**
+ * Tests for {@link ChoiceParameter}.
+ */
 public class ChoiceParameterTest
 extends ParameterTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/DoubleParameterTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/DoubleParameterTest.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/DoubleParameterTest.java
index 7a4d4fa..a4e8125 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/DoubleParameterTest.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/DoubleParameterTest.java
@@ -20,12 +20,16 @@ package org.apache.flink.graph.drivers.parameter;
 
 import org.apache.flink.api.java.utils.ParameterTool;
 import org.apache.flink.client.program.ProgramParametrizationException;
+
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
 
+/**
+ * Tests for {@link DoubleParameter}.
+ */
 public class DoubleParameterTest
 extends ParameterTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/IterationConvergenceTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/IterationConvergenceTest.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/IterationConvergenceTest.java
index ae92943..e6240b1 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/IterationConvergenceTest.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/IterationConvergenceTest.java
@@ -19,10 +19,14 @@
 package org.apache.flink.graph.drivers.parameter;
 
 import org.apache.flink.api.java.utils.ParameterTool;
+
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+/**
+ * Tests for {@link IterationConvergence}.
+ */
 public class IterationConvergenceTest
 extends ParameterTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/LongParameterTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/LongParameterTest.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/LongParameterTest.java
index 2c26268..4b624a6 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/LongParameterTest.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/LongParameterTest.java
@@ -20,12 +20,16 @@ package org.apache.flink.graph.drivers.parameter;
 
 import org.apache.flink.api.java.utils.ParameterTool;
 import org.apache.flink.client.program.ProgramParametrizationException;
+
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
 
+/**
+ * Tests for {@link LongParameter}.
+ */
 public class LongParameterTest
 extends ParameterTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/ParameterTestBase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/ParameterTestBase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/ParameterTestBase.java
index a5dc0c6..3a1d0e1 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/ParameterTestBase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/ParameterTestBase.java
@@ -20,7 +20,10 @@ package org.apache.flink.graph.drivers.parameter;
 
 import org.junit.Before;
 
-public class ParameterTestBase {
+/**
+ * Base class for {@link Parameter} tests.
+ */
+public abstract class ParameterTestBase {
 
 	protected ParameterizedBase owner;
 
@@ -29,7 +32,7 @@ public class ParameterTestBase {
 		owner = new MockParameterized();
 	}
 
-	protected static class MockParameterized
+	private static class MockParameterized
 	extends ParameterizedBase {
 		@Override
 		public String getName() {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/SimplifyTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/SimplifyTest.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/SimplifyTest.java
index 12ae7dc..126a0ac 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/SimplifyTest.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/SimplifyTest.java
@@ -20,10 +20,14 @@ package org.apache.flink.graph.drivers.parameter;
 
 import org.apache.flink.api.java.utils.ParameterTool;
 import org.apache.flink.graph.drivers.parameter.Simplify.Ordering;
+
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+/**
+ * Tests for {@link Simplify}.
+ */
 public class SimplifyTest
 extends ParameterTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/StringParameterTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/StringParameterTest.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/StringParameterTest.java
index 496d85c..a551859 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/StringParameterTest.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/drivers/parameter/StringParameterTest.java
@@ -19,12 +19,16 @@
 package org.apache.flink.graph.drivers.parameter;
 
 import org.apache.flink.api.java.utils.ParameterTool;
+
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
 
+/**
+ * Tests for {@link StringParameter}.
+ */
 public class StringParameterTest
 extends ParameterTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/CommunityDetectionITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/CommunityDetectionITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/CommunityDetectionITCase.java
index c37d1ed..8b8c44e 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/CommunityDetectionITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/CommunityDetectionITCase.java
@@ -24,12 +24,16 @@ import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.graph.examples.data.CommunityDetectionData;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
 import java.util.List;
 
+/**
+ * Tests for {@link CommunityDetection}.
+ */
 @RunWith(Parameterized.class)
 public class CommunityDetectionITCase extends MultipleProgramsTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/LabelPropagationITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/LabelPropagationITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/LabelPropagationITCase.java
index e6ba794..d61c71d 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/LabelPropagationITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/LabelPropagationITCase.java
@@ -24,12 +24,16 @@ import org.apache.flink.graph.Vertex;
 import org.apache.flink.graph.examples.data.LabelPropagationData;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
 import java.util.List;
 
+/**
+ * Tests for {@link LabelPropagation}.
+ */
 @RunWith(Parameterized.class)
 public class LabelPropagationITCase extends MultipleProgramsTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/SummarizationITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/SummarizationITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/SummarizationITCase.java
index fe4cd24..baad6d0 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/SummarizationITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/SummarizationITCase.java
@@ -31,6 +31,7 @@ import org.apache.flink.graph.examples.data.SummarizationData;
 import org.apache.flink.graph.library.Summarization.EdgeValue;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -45,6 +46,9 @@ import java.util.regex.Pattern;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
+/**
+ * Tests for {@link Summarization}.
+ */
 @RunWith(Parameterized.class)
 public class SummarizationITCase extends MultipleProgramsTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/TriangleEnumeratorITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/TriangleEnumeratorITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/TriangleEnumeratorITCase.java
index 176a7e1..2e1cc7c 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/TriangleEnumeratorITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/library/TriangleEnumeratorITCase.java
@@ -24,6 +24,7 @@ import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.examples.data.TriangleCountData;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -31,6 +32,9 @@ import org.junit.runners.Parameterized;
 
 import java.util.List;
 
+/**
+ * Tests for {@link TriangleEnumerator}.
+ */
 @RunWith(Parameterized.class)
 public class TriangleEnumeratorITCase extends MultipleProgramsTestBase {
 
@@ -50,8 +54,8 @@ public class TriangleEnumeratorITCase extends MultipleProgramsTestBase {
 		List<Tuple3<Long, Long, Long>> expectedResult = TriangleCountData.getListOfTriangles();
 
 		Assert.assertEquals(expectedResult.size(), actualOutput.size());
-		for(Tuple3<Long, Long, Long> resultTriangle:actualOutput)	{
-			Assert.assertTrue(expectedResult.indexOf(resultTriangle)>=0);
+		for (Tuple3<Long, Long, Long> resultTriangle:actualOutput)	{
+			Assert.assertTrue(expectedResult.indexOf(resultTriangle) >= 0);
 		}
 	}
 }
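
Incidentally, the indexOf(resultTriangle) >= 0 membership check retained here is equivalent to the more direct List.contains; a one-line alternative sketch of the assertion:

		Assert.assertTrue(expectedResult.contains(resultTriangle));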

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/GatherSumApplyITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/GatherSumApplyITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/GatherSumApplyITCase.java
index 19cf677..066f2c4 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/GatherSumApplyITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/GatherSumApplyITCase.java
@@ -34,12 +34,16 @@ import org.apache.flink.graph.utils.GraphUtils.IdentityMapper;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
 import java.util.List;
 
+/**
+ * Tests for gather-sum-apply.
+ */
 @RunWith(Parameterized.class)
 public class GatherSumApplyITCase extends MultipleProgramsTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/EuclideanGraphWeighingITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/EuclideanGraphWeighingITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/EuclideanGraphWeighingITCase.java
index 922c4b2..deac519 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/EuclideanGraphWeighingITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/EuclideanGraphWeighingITCase.java
@@ -18,12 +18,13 @@
 
 package org.apache.flink.graph.test.examples;
 
-import com.google.common.base.Charsets;
-import com.google.common.io.Files;
 import org.apache.flink.graph.examples.EuclideanGraphWeighing;
 import org.apache.flink.graph.examples.data.EuclideanGraphData;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
 import org.apache.flink.test.util.TestBaseUtils;
+
+import com.google.common.base.Charsets;
+import com.google.common.io.Files;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -34,6 +35,9 @@ import org.junit.runners.Parameterized;
 
 import java.io.File;
 
+/**
+ * Tests for {@link EuclideanGraphWeighing}.
+ */
 @RunWith(Parameterized.class)
 public class EuclideanGraphWeighingITCase extends MultipleProgramsTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/IncrementalSSSPITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/IncrementalSSSPITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/IncrementalSSSPITCase.java
index de92666..9147a6d 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/IncrementalSSSPITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/IncrementalSSSPITCase.java
@@ -18,8 +18,6 @@
 
 package org.apache.flink.graph.test.examples;
 
-import com.google.common.base.Charsets;
-import com.google.common.io.Files;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.graph.Edge;
@@ -31,6 +29,9 @@ import org.apache.flink.graph.examples.data.IncrementalSSSPData;
 import org.apache.flink.graph.spargel.ScatterGatherConfiguration;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
 import org.apache.flink.test.util.TestBaseUtils;
+
+import com.google.common.base.Charsets;
+import com.google.common.io.Files;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -41,6 +42,9 @@ import org.junit.runners.Parameterized;
 
 import java.io.File;
 
+/**
+ * Tests for {@link IncrementalSSSP}.
+ */
 @RunWith(Parameterized.class)
 public class IncrementalSSSPITCase extends MultipleProgramsTestBase {
 
@@ -104,7 +108,7 @@ public class IncrementalSSSPITCase extends MultipleProgramsTestBase {
 		// configure the iteration
 		ScatterGatherConfiguration parameters = new ScatterGatherConfiguration();
 
-		if(IncrementalSSSP.isInSSSP(edgeToBeRemoved, edgesInSSSP)) {
+		if (IncrementalSSSP.isInSSSP(edgeToBeRemoved, edgesInSSSP)) {
 
 			parameters.setDirection(EdgeDirection.IN);
 			parameters.setOptDegrees(true);

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/MusicProfilesITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/MusicProfilesITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/MusicProfilesITCase.java
index d76a3ec..be412cf 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/MusicProfilesITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/MusicProfilesITCase.java
@@ -18,13 +18,13 @@
 
 package org.apache.flink.graph.test.examples;
 
-import com.google.common.base.Charsets;
-import com.google.common.io.Files;
-
 import org.apache.flink.graph.examples.MusicProfiles;
 import org.apache.flink.graph.examples.data.MusicProfilesData;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
 import org.apache.flink.test.util.TestBaseUtils;
+
+import com.google.common.base.Charsets;
+import com.google.common.io.Files;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -38,6 +38,9 @@ import java.io.File;
 import java.util.ArrayList;
 import java.util.Arrays;
 
+/**
+ * Tests for {@link MusicProfiles}.
+ */
 @RunWith(Parameterized.class)
 public class MusicProfilesITCase extends MultipleProgramsTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/PageRankITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/PageRankITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/PageRankITCase.java
index 41f9a0f..62ed39f 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/PageRankITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/PageRankITCase.java
@@ -26,6 +26,7 @@ import org.apache.flink.graph.examples.GSAPageRank;
 import org.apache.flink.graph.examples.PageRank;
 import org.apache.flink.graph.examples.data.PageRankData;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
+
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -34,6 +35,9 @@ import org.junit.runners.Parameterized;
 import java.util.Arrays;
 import java.util.List;
 
+/**
+ * Tests for {@link PageRank}.
+ */
 @RunWith(Parameterized.class)
 public class PageRankITCase extends MultipleProgramsTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/SingleSourceShortestPathsITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/SingleSourceShortestPathsITCase.java b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/SingleSourceShortestPathsITCase.java
index 2fd8812..60bd161 100644
--- a/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/SingleSourceShortestPathsITCase.java
+++ b/flink-libraries/flink-gelly-examples/src/test/java/org/apache/flink/graph/test/examples/SingleSourceShortestPathsITCase.java
@@ -18,15 +18,15 @@
 
 package org.apache.flink.graph.test.examples;
 
-import com.google.common.base.Charsets;
-import com.google.common.io.Files;
-
 import org.apache.flink.graph.examples.GSASingleSourceShortestPaths;
+import org.apache.flink.graph.examples.PregelSSSP;
 import org.apache.flink.graph.examples.SingleSourceShortestPaths;
 import org.apache.flink.graph.examples.data.SingleSourceShortestPathsData;
-import org.apache.flink.graph.examples.PregelSSSP;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
 import org.apache.flink.test.util.TestBaseUtils;
+
+import com.google.common.base.Charsets;
+import com.google.common.io.Files;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -37,6 +37,9 @@ import org.junit.runners.Parameterized;
 
 import java.io.File;
 
+/**
+ * Tests for {@link SingleSourceShortestPaths}.
+ */
 @RunWith(Parameterized.class)
 public class SingleSourceShortestPathsITCase extends MultipleProgramsTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/GellyScalaAPICompletenessTest.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/GellyScalaAPICompletenessTest.scala b/flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/GellyScalaAPICompletenessTest.scala
index 034bf77..d224f0f 100644
--- a/flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/GellyScalaAPICompletenessTest.scala
+++ b/flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/GellyScalaAPICompletenessTest.scala
@@ -18,12 +18,14 @@
 package org.apache.flink.graph.scala.test
 
 import java.lang.reflect.Method
-import org.apache.flink.graph.scala._
+
 import org.apache.flink.api.scala.completeness.ScalaAPICompletenessTestBase
+import org.apache.flink.graph.scala._
 import org.apache.flink.graph.{Graph => JavaGraph}
-import scala.language.existentials
 import org.junit.Test
 
+import scala.language.existentials
+
 /**
  * This checks whether the Gelly Scala API is up to feature parity with the Java API.
  * Implements the [[ScalaAPICompletenessTestBase]] for Gelly.

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/operations/GraphMutationsITCase.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/operations/GraphMutationsITCase.scala b/flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/operations/GraphMutationsITCase.scala
index 83980a7..17b68db 100644
--- a/flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/operations/GraphMutationsITCase.scala
+++ b/flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/operations/GraphMutationsITCase.scala
@@ -23,9 +23,9 @@ import org.apache.flink.graph.scala._
 import org.apache.flink.graph.scala.test.TestGraphUtils
 import org.apache.flink.graph.{Edge, Vertex}
 import org.apache.flink.test.util.{MultipleProgramsTestBase, TestBaseUtils}
+import org.junit.Test
 import org.junit.runner.RunWith
 import org.junit.runners.Parameterized
-import org.junit.Test
 
 import _root_.scala.collection.JavaConverters._
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/operations/GraphOperationsITCase.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/operations/GraphOperationsITCase.scala b/flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/operations/GraphOperationsITCase.scala
index 9d77e68..76de25e 100644
--- a/flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/operations/GraphOperationsITCase.scala
+++ b/flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/operations/GraphOperationsITCase.scala
@@ -23,9 +23,10 @@ import org.apache.flink.graph.scala._
 import org.apache.flink.graph.scala.test.TestGraphUtils
 import org.apache.flink.graph.{Edge, Vertex}
 import org.apache.flink.test.util.{MultipleProgramsTestBase, TestBaseUtils}
+import org.junit.Test
 import org.junit.runner.RunWith
 import org.junit.runners.Parameterized
-import org.junit.Test
+
 import _root_.scala.collection.JavaConverters._
 
 @RunWith(classOf[Parameterized])

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/operations/JoinWithEdgesITCase.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/operations/JoinWithEdgesITCase.scala b/flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/operations/JoinWithEdgesITCase.scala
index 0a7f1b9..aa2c015 100644
--- a/flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/operations/JoinWithEdgesITCase.scala
+++ b/flink-libraries/flink-gelly-scala/src/test/scala/org/apache/flink/graph/scala/test/operations/JoinWithEdgesITCase.scala
@@ -20,10 +20,10 @@ package org.apache.flink.graph.scala.test.operations
 
 import org.apache.flink.api.common.functions.MapFunction
 import org.apache.flink.api.scala._
-import org.apache.flink.graph.{Edge, EdgeJoinFunction}
 import org.apache.flink.graph.scala._
 import org.apache.flink.graph.scala.test.TestGraphUtils
 import org.apache.flink.graph.scala.utils.EdgeToTuple3Map
+import org.apache.flink.graph.{Edge, EdgeJoinFunction}
 import org.apache.flink.test.util.{MultipleProgramsTestBase, TestBaseUtils}
 import org.junit.Test
 import org.junit.runner.RunWith

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/pom.xml
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/pom.xml b/flink-libraries/flink-gelly/pom.xml
index d620a66..c059267 100644
--- a/flink-libraries/flink-gelly/pom.xml
+++ b/flink-libraries/flink-gelly/pom.xml
@@ -83,4 +83,44 @@ under the License.
 			<scope>test</scope>
 		</dependency>
 	</dependencies>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-checkstyle-plugin</artifactId>
+				<version>2.17</version>
+				<dependencies>
+					<dependency>
+						<groupId>com.puppycrawl.tools</groupId>
+						<artifactId>checkstyle</artifactId>
+						<version>6.19</version>
+					</dependency>
+				</dependencies>
+				<configuration>
+					<configLocation>/tools/maven/strict-checkstyle.xml</configLocation>
+					<suppressionsLocation>/tools/maven/suppressions.xml</suppressionsLocation>
+					<includeTestSourceDirectory>true</includeTestSourceDirectory>
+					<logViolationsToConsole>true</logViolationsToConsole>
+					<failOnViolation>true</failOnViolation>
+				</configuration>
+				<executions>
+					<!--
+					Execute checkstyle after compilation but before tests.
+
+					This ensures that any parsing or type checking errors are from
+					javac, so they look as expected. Beyond that, we want to
+					fail as early as possible.
+					-->
+					<execution>
+						<phase>test-compile</phase>
+						<goals>
+							<goal>check</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
+
 </project>
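
With the check goal bound to test-compile and failOnViolation set to true, any build that compiles the tests (mvn test-compile, mvn verify, ...) fails on the first reported violation; the module can presumably also be audited on its own with mvn checkstyle:check, which picks up the same plugin-level configuration.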

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/AbstractGraphAnalytic.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/AbstractGraphAnalytic.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/AbstractGraphAnalytic.java
deleted file mode 100644
index 4d3d055..0000000
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/AbstractGraphAnalytic.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.graph;
-
-import org.apache.flink.api.java.ExecutionEnvironment;
-import org.apache.flink.util.Preconditions;
-
-/**
- * Base class for {@link GraphAnalytic}.
- *
- * @param <K> key type
- * @param <VV> vertex value type
- * @param <EV> edge value type
- * @param <T> the return type
- */
-public abstract class AbstractGraphAnalytic<K, VV, EV, T>
-implements GraphAnalytic<K, VV, EV, T> {
-
-	protected ExecutionEnvironment env;
-
-	@Override
-	public GraphAnalytic<K, VV, EV, T> run(Graph<K, VV, EV> input)
-			throws Exception {
-		env = input.getContext();
-		return this;
-	}
-
-	@Override
-	public T execute()
-			throws Exception {
-		env.execute();
-		return getResult();
-	}
-
-	@Override
-	public T execute(String jobName)
-			throws Exception {
-		Preconditions.checkNotNull(jobName);
-
-		env.execute(jobName);
-		return getResult();
-	}
-}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/AnalyticHelper.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/AnalyticHelper.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/AnalyticHelper.java
index dbe3e0c..9bd2336 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/AnalyticHelper.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/AnalyticHelper.java
@@ -34,7 +34,7 @@ import java.io.Serializable;
  * Flink accumulators. This computation is cheaply performed in a terminating
  * {@link RichOutputFormat}.
  *
- * This class simplifies the creation of analytic helpers by providing pass-through
+ * <p>This class simplifies the creation of analytic helpers by providing pass-through
  * methods for adding and getting accumulators. Each accumulator name is prefixed
  * with a random string since Flink accumulators share a per-job global namespace.
  * This class also provides empty implementations of {@link RichOutputFormat#open}
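
The <p> rewrite above, repeated across the Javadoc touch-ups in the files that follow, reflects the Javadoc paragraph style the strict checkstyle presumably enforces (checkstyle's JavadocParagraph check): after a blank comment line, the next paragraph starts with <p> attached to its first word, rather than a bare <p> line between paragraphs.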

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/EdgeJoinFunction.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/EdgeJoinFunction.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/EdgeJoinFunction.java
index 698b3b6..e689169 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/EdgeJoinFunction.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/EdgeJoinFunction.java
@@ -18,11 +18,11 @@
 
 package org.apache.flink.graph;
 
-import java.io.Serializable;
-
 import org.apache.flink.api.common.functions.Function;
 import org.apache.flink.api.java.DataSet;
 
+import java.io.Serializable;
+
 /**
  * Interface to be implemented by the transformation function
  * applied in {@link Graph#joinWithEdges(DataSet, EdgeJoinFunction)},
@@ -37,7 +37,7 @@ public interface EdgeJoinFunction<EV, T> extends Function, Serializable {
 	/**
 	 * Applies a transformation on the current edge value
 	 * and the value of the matched tuple of the input DataSet.
-	 * 
+	 *
 	 * @param edgeValue the current edge value
 	 * @param inputValue the value of the matched Tuple2 input
 	 * @return the new edge value
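
As a usage sketch (graph and factor types assumed for illustration), the transformation can simply scale each matched edge value:

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.graph.EdgeJoinFunction;
import org.apache.flink.graph.Graph;

public class ScaleEdgeValues {

	// Multiplies each edge value by the factor joined on the edge's source ID.
	public static Graph<Long, Double, Double> scale(
			Graph<Long, Double, Double> graph, DataSet<Tuple2<Long, Double>> factors) {
		return graph.joinWithEdgesOnSource(factors, new EdgeJoinFunction<Double, Double>() {
			@Override
			public Double edgeJoin(Double edgeValue, Double factor) {
				return edgeValue * factor;  // becomes the new edge value
			}
		});
	}
}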

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/EdgeOrder.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/EdgeOrder.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/EdgeOrder.java
index 8226b04..9f04a58 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/EdgeOrder.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/EdgeOrder.java
@@ -32,7 +32,7 @@ public enum EdgeOrder {
 	private final byte bitmask;
 
 	EdgeOrder(int bitmask) {
-		this.bitmask = (byte)bitmask;
+		this.bitmask = (byte) bitmask;
 	}
 
 	/**

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/EdgesFunction.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/EdgesFunction.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/EdgesFunction.java
index 07e14e9..c59d76e 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/EdgesFunction.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/EdgesFunction.java
@@ -18,12 +18,12 @@
 
 package org.apache.flink.graph;
 
-import java.io.Serializable;
-
 import org.apache.flink.api.common.functions.Function;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.util.Collector;
 
+import java.io.Serializable;
+
 /**
  * Interface to be implemented by the function applied to a vertex neighborhood
  * in the {@link Graph#groupReduceOnEdges(EdgesFunction, EdgeDirection)} method.
@@ -37,16 +37,16 @@ public interface EdgesFunction<K, EV, O> extends Function, Serializable {
 	/**
 	 * This method is called per vertex and can iterate over all of its neighboring edges
 	 * with the specified direction.
-	 * <p>
-	 * If called with {@link EdgeDirection#OUT} the group will contain
+	 *
+	 * <p>If called with {@link EdgeDirection#OUT} the group will contain
 	 * the out-edges of the grouping vertex.
 	 * If called with {@link EdgeDirection#IN} the group will contain
 	 * the in-edges of the grouping vertex.
 	 * If called with {@link EdgeDirection#ALL} the group will contain
 	 * all edges of the grouping vertex.
-	 * <p>
-	 * The method can emit any number of output elements, including none.
-	 * 
+	 *
+	 * <p>The method can emit any number of output elements, including none.
+	 *
 	 * @param edges the neighboring edges of the grouping vertex.
 	 * The first field of each Tuple2 is the ID of the grouping vertex.
 	 * The second field is the neighboring edge.
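
A minimal implementation sketch (vertex key and edge value types assumed), computing the out-degree of every vertex that has at least one out-edge:

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.EdgeDirection;
import org.apache.flink.graph.EdgesFunction;
import org.apache.flink.graph.Graph;
import org.apache.flink.util.Collector;

public class OutDegrees {

	public static DataSet<Tuple2<Long, Long>> run(Graph<Long, Double, Double> graph) {
		return graph.groupReduceOnEdges(new EdgesFunction<Long, Double, Tuple2<Long, Long>>() {
			@Override
			public void iterateEdges(Iterable<Tuple2<Long, Edge<Long, Double>>> edges,
					Collector<Tuple2<Long, Long>> out) {
				Long id = null;
				long degree = 0;
				for (Tuple2<Long, Edge<Long, Double>> edge : edges) {
					id = edge.f0;  // first field: ID of the grouping vertex
					degree++;
				}
				out.collect(new Tuple2<>(id, degree));
			}
		}, EdgeDirection.OUT);
	}
}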

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/EdgesFunctionWithVertexValue.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/EdgesFunctionWithVertexValue.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/EdgesFunctionWithVertexValue.java
index 645bd7c..e63fe99 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/EdgesFunctionWithVertexValue.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/EdgesFunctionWithVertexValue.java
@@ -18,11 +18,11 @@
 
 package org.apache.flink.graph;
 
-import java.io.Serializable;
-
 import org.apache.flink.api.common.functions.Function;
 import org.apache.flink.util.Collector;
 
+import java.io.Serializable;
+
 /**
  * Interface to be implemented by the function applied to a vertex neighborhood
  * in the {@link Graph#groupReduceOnEdges(EdgesFunctionWithVertexValue, EdgeDirection)}
@@ -38,16 +38,16 @@ public interface EdgesFunctionWithVertexValue<K, VV, EV, O> extends Function, Se
 	/**
 	 * This method is called per vertex and can iterate over all of its neighboring edges
 	 * with the specified direction.
-	 * <p>
-	 * If called with {@link EdgeDirection#OUT} the group will contain
+	 *
+	 * <p>If called with {@link EdgeDirection#OUT} the group will contain
 	 * the out-edges of the grouping vertex.
 	 * If called with {@link EdgeDirection#IN} the group will contain
 	 * the in-edges of the grouping vertex.
 	 * If called with {@link EdgeDirection#ALL} the group will contain
 	 * all edges of the grouping vertex.
-	 * <p>
-	 * The method can emit any number of output elements, including none.
-	 * 
+	 *
+	 * <p>The method can emit any number of output elements, including none.
+	 *
 	 * @param vertex the grouping vertex
 	 * @param edges the neighboring edges of the grouping vertex.
 	 * @param out the collector to emit results to


[06/15] flink git commit: [FLINK-6709] [gelly] Activate strict checkstyle for flink-gelly

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/VertexCentricIteration.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/VertexCentricIteration.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/VertexCentricIteration.java
index 7e8ebd7..c30b1a7 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/VertexCentricIteration.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/VertexCentricIteration.java
@@ -54,22 +54,21 @@ import java.util.Map;
  * This class represents iterative graph computations, programmed in a vertex-centric perspective.
  * It is a special case of <i>Bulk Synchronous Parallel</i> computation. The paradigm has also been
  * implemented by Google's <i>Pregel</i> system and by <i>Apache Giraph</i>.
- * <p>
- * Vertex centric algorithms operate on graphs, which are defined through vertices and edges. The 
+ *
+ * <p>Vertex centric algorithms operate on graphs, which are defined through vertices and edges. The
  * algorithms send messages along the edges and update the state of vertices based on
  * the old state and the incoming messages. All vertices have an initial state.
  * The computation terminates once no vertex receives any message anymore.
  * Additionally, a maximum number of iterations (supersteps) may be specified.
- * <p>
- * The computation is here represented by one function:
+ *
+ * <p>The computation is here represented by one function:
  * <ul>
  *   <li>The {@link ComputeFunction} receives incoming messages, may update the state for
  *   the vertex, and sends messages along the edges of the vertex.
  *   </li>
  * </ul>
- * <p>
  *
- * Vertex-centric graph iterations are run by calling
+ * <p>Vertex-centric graph iterations are run by calling
  * {@link Graph#runVertexCentricIteration(ComputeFunction, MessageCombiner, int)}.
  *
  * @param <K> The type of the vertex key (the vertex identifier).
@@ -77,25 +76,25 @@ import java.util.Map;
  * @param <Message> The type of the message sent between vertices along the edges.
  * @param <EV> The type of the values that are associated with the edges.
  */
-public class VertexCentricIteration<K, VV, EV, Message> 
+public class VertexCentricIteration<K, VV, EV, Message>
 	implements CustomUnaryOperation<Vertex<K, VV>, Vertex<K, VV>> {
 
 	private final ComputeFunction<K, VV, EV, Message> computeFunction;
 
 	private final MessageCombiner<K, Message> combineFunction;
-	
+
 	private final DataSet<Edge<K, EV>> edgesWithValue;
-	
+
 	private final int maximumNumberOfIterations;
-	
+
 	private final TypeInformation<Message> messageType;
-	
+
 	private DataSet<Vertex<K, VV>> initialVertices;
 
 	private VertexCentricConfiguration configuration;
 
 	// ----------------------------------------------------------------------------------
-	
+
 	private VertexCentricIteration(ComputeFunction<K, VV, EV, Message> cf,
 			DataSet<Edge<K, EV>> edgesWithValue, MessageCombiner<K, Message> mc,
 			int maximumNumberOfIterations) {
@@ -108,45 +107,44 @@ public class VertexCentricIteration<K, VV, EV, Message>
 		this.computeFunction = cf;
 		this.edgesWithValue = edgesWithValue;
 		this.combineFunction = mc;
-		this.maximumNumberOfIterations = maximumNumberOfIterations;		
+		this.maximumNumberOfIterations = maximumNumberOfIterations;
 		this.messageType = getMessageType(cf);
 	}
-	
+
 	private TypeInformation<Message> getMessageType(ComputeFunction<K, VV, EV, Message> cf) {
 		return TypeExtractor.createTypeInfo(cf, ComputeFunction.class, cf.getClass(), 3);
 	}
-	
+
 	// --------------------------------------------------------------------------------------------
 	//  Custom Operator behavior
 	// --------------------------------------------------------------------------------------------
-	
+
 	/**
 	 * Sets the input data set for this operator. In the case of this operator this input data set represents
 	 * the set of vertices with their initial state.
-	 * 
+	 *
 	 * @param inputData The input data set, which in the case of this operator represents the set of
 	 *                  vertices with their initial state.
-	 * 
+	 *
 	 * @see org.apache.flink.api.java.operators.CustomUnaryOperation#setInput(org.apache.flink.api.java.DataSet)
 	 */
 	@Override
 	public void setInput(DataSet<Vertex<K, VV>> inputData) {
 		this.initialVertices = inputData;
 	}
-	
+
 	/**
 	 * Creates the operator that represents this vertex-centric graph computation.
-	 * <p>
-	 *  The Pregel iteration is mapped to delta iteration as follows.
-	 *  The solution set consists of the set of active vertices and the workset contains the set of messages
-	 *  send to vertices during the previous superstep. Initially, the workset contains a null message for each vertex.
-	 *  In the beginning of a superstep, the solution set is joined with the workset to produce
-	 *  a dataset containing tuples of vertex state and messages (vertex inbox).
-	 *  The superstep compute UDF is realized with a coGroup between the vertices with inbox and the graph edges.
-	 *  The output of the compute UDF contains both the new vertex values and the new messages produced.
-	 *  These are directed to the solution set delta and new workset, respectively, with subsequent flatMaps.
-	 * <p/>
-	 * 
+	 *
+	 * <p>The Pregel iteration is mapped to delta iteration as follows.
+	 * The solution set consists of the set of active vertices and the workset contains the set of messages
+	 * sent to vertices during the previous superstep. Initially, the workset contains a null message for each vertex.
+	 * In the beginning of a superstep, the solution set is joined with the workset to produce
+	 * a dataset containing tuples of vertex state and messages (vertex inbox).
+	 * The superstep compute UDF is realized with a coGroup between the vertices with inbox and the graph edges.
+	 * The output of the compute UDF contains both the new vertex values and the new messages produced.
+	 * These are directed to the solution set delta and new workset, respectively, with subsequent flatMaps.
+	 *
 	 * @return The operator that represents this vertex-centric graph computation.
 	 */
 	@Override
@@ -226,15 +224,15 @@ public class VertexCentricIteration<K, VV, EV, Message>
 
 	/**
 	 * Creates a new vertex-centric iteration operator.
-	 * 
+	 *
 	 * @param edgesWithValue The data set containing edges.
 	 * @param cf The compute function
-	 * 
+	 *
 	 * @param <K> The type of the vertex key (the vertex identifier).
 	 * @param <VV> The type of the vertex value (the state of the vertex).
 	 * @param <Message> The type of the message sent between vertices along the edges.
 	 * @param <EV> The type of the values that are associated with the edges.
-	 * 
+	 *
 	 * @return An instance of the vertex-centric graph computation operator.
 	 */
 	public static <K, VV, EV, Message> VertexCentricIteration<K, VV, EV, Message> withEdges(
@@ -248,16 +246,16 @@ public class VertexCentricIteration<K, VV, EV, Message>
 	/**
 	 * Creates a new vertex-centric iteration operator for graphs where the edges are associated with a value (such as
 	 * a weight or distance).
-	 * 
+	 *
 	 * @param edgesWithValue The data set containing edges.
 	 * @param cf The compute function.
 	 * @param mc The function that combines messages sent to a vertex during a superstep.
-	 * 
+	 *
 	 * @param <K> The type of the vertex key (the vertex identifier).
 	 * @param <VV> The type of the vertex value (the state of the vertex).
 	 * @param <Message> The type of the message sent between vertices along the edges.
 	 * @param <EV> The type of the values that are associated with the edges.
-	 * 
+	 *
 	 * @return An instance of the vertex-centric graph computation operator.
 	 */
 	public static <K, VV, EV, Message> VertexCentricIteration<K, VV, EV, Message> withEdges(
@@ -307,7 +305,7 @@ public class VertexCentricIteration<K, VV, EV, Message>
 			return outTuple;
 		}
 }
-	
+
 	/**
 	 * This coGroup class wraps the user-defined compute function.
 	 * The first input holds a Tuple2 containing the vertex state and its inbox.
@@ -341,7 +339,7 @@ public class VertexCentricIteration<K, VV, EV, Message>
 			}
 			this.computeFunction.preSuperstep();
 		}
-		
+
 		@Override
 		public void close() throws Exception {
 			this.computeFunction.postSuperstep();
@@ -365,7 +363,7 @@ public class VertexCentricIteration<K, VV, EV, Message>
 				if (getIterationRuntimeContext().getSuperstepNumber() == 1) {
 					// there are no messages during the 1st superstep
 				}
-				else {				
+				else {
 					messageIter.setFirst(first.f1.right());
 					@SuppressWarnings("unchecked")
 					Iterator<Tuple2<?, Either<NullValue, Message>>> downcastIter =
@@ -381,7 +379,7 @@ public class VertexCentricIteration<K, VV, EV, Message>
 
 	@SuppressWarnings("serial")
 	@ForwardedFields("f0")
-	public static class MessageCombinerUdf<K, Message> extends RichGroupReduceFunction<
+	private static class MessageCombinerUdf<K, Message> extends RichGroupReduceFunction<
 			Tuple2<K, Either<NullValue, Message>>, Tuple2<K, Either<NullValue, Message>>>
 			implements ResultTypeQueryable<Tuple2<K, Either<NullValue, Message>>>,
 			GroupCombineFunction<Tuple2<K, Either<NullValue, Message>>, Tuple2<K, Either<NullValue, Message>>> {
@@ -404,7 +402,7 @@ public class VertexCentricIteration<K, VV, EV, Message>
 		@Override
 		public void reduce(Iterable<Tuple2<K, Either<NullValue, Message>>> messages,
 				Collector<Tuple2<K, Either<NullValue, Message>>> out) throws Exception {
-			
+
 			final Iterator<Tuple2<K, Either<NullValue, Message>>> messageIterator = messages.iterator();
 
 			if (messageIterator.hasNext()) {
@@ -437,7 +435,7 @@ public class VertexCentricIteration<K, VV, EV, Message>
 	// --------------------------------------------------------------------------------------------
 
 	/**
-	 * Helper method which sets up an iteration with the given vertex value
+	 * Helper method which sets up an iteration with the given vertex value.
 	 *
 	 * @param iteration
 	 */
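
To make the model concrete, here is a single-source shortest paths compute function in the style of the Gelly documentation (key, value, and message types assumed; vertex values are expected to be initialized to Double.POSITIVE_INFINITY):

import org.apache.flink.graph.Edge;
import org.apache.flink.graph.Vertex;
import org.apache.flink.graph.pregel.ComputeFunction;
import org.apache.flink.graph.pregel.MessageCombiner;
import org.apache.flink.graph.pregel.MessageIterator;

public class SSSPComputeFunction extends ComputeFunction<Long, Double, Double, Double> {

	private final long srcId;

	public SSSPComputeFunction(long srcId) {
		this.srcId = srcId;
	}

	@Override
	public void compute(Vertex<Long, Double> vertex, MessageIterator<Double> messages) {
		// The source starts at distance 0; everyone else starts unreachable.
		double minDistance = vertex.getId() == srcId ? 0d : Double.POSITIVE_INFINITY;
		for (Double msg : messages) {
			minDistance = Math.min(minDistance, msg);
		}
		// Only an improved distance is written back and propagated.
		if (minDistance < vertex.getValue()) {
			setNewVertexValue(minDistance);
			for (Edge<Long, Double> edge : getEdges()) {
				sendMessageTo(edge.getTarget(), minDistance + edge.getValue());
			}
		}
	}
}

A combiner can cut message traffic by pre-aggregating candidates per target vertex:

public class SSSPCombiner extends MessageCombiner<Long, Double> {

	@Override
	public void combineMessages(MessageIterator<Double> messages) {
		double min = Double.POSITIVE_INFINITY;
		for (Double msg : messages) {
			min = Math.min(min, msg);
		}
		sendCombinedMessage(min);
	}
}

The iteration is then run via graph.runVertexCentricIteration(new SSSPComputeFunction(srcId), new SSSPCombiner(), maxIterations).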

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/GatherFunction.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/GatherFunction.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/GatherFunction.java
index 93b3a8c..63ba087 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/GatherFunction.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/GatherFunction.java
@@ -32,7 +32,7 @@ import java.util.Collection;
  * This class must be extended by functions that compute the state of the vertex depending on the old state and the
  * incoming messages. The central method is {@link #updateVertex(Vertex, MessageIterator)}, which is
  * invoked once per vertex per superstep.
- * 
+ *
  * {@code <K>} The vertex key type.
  * {@code <VV>} The vertex value type.
  * {@code <Message>} The message type.
@@ -81,24 +81,24 @@ public abstract class GatherFunction<K, VV, Message> implements Serializable {
 	 * This method is invoked once per vertex per superstep. It receives the current state of the vertex, as well as
 	 * the incoming messages. It may set a new vertex state via {@link #setNewVertexValue(Object)}. If the vertex
 	 * state is changed, it will trigger the sending of messages via the {@link ScatterFunction}.
-	 * 
+	 *
 	 * @param vertex The vertex.
 	 * @param inMessages The incoming messages to this vertex.
-	 * 
+	 *
 	 * @throws Exception The computation may throw exceptions, which causes the superstep to fail.
 	 */
 	public abstract void updateVertex(Vertex<K, VV> vertex, MessageIterator<Message> inMessages) throws Exception;
 
 	/**
 	 * This method is executed once per superstep before the gather function is invoked for each vertex.
-	 * 
+	 *
 	 * @throws Exception Exceptions in the pre-superstep phase cause the superstep to fail.
 	 */
 	public void preSuperstep() throws Exception {}
 
 	/**
 	 * This method is executed once per superstep after the gather function has been invoked for each vertex.
-	 * 
+	 *
 	 * @throws Exception Exceptions in the post-superstep phase cause the superstep to fail.
 	 */
 	public void postSuperstep() throws Exception {}
@@ -106,16 +106,16 @@ public abstract class GatherFunction<K, VV, Message> implements Serializable {
 	/**
 	 * Sets the new value of this vertex. Setting a new value triggers the sending of outgoing messages from this vertex.
 	 *
-	 * This should be called at most once per updateVertex.
-	 * 
+	 * <p>This should be called at most once per updateVertex.
+	 *
 	 * @param newValue The new vertex value.
 	 */
 	public void setNewVertexValue(VV newValue) {
-		if(setNewVertexValueCalled) {
+		if (setNewVertexValueCalled) {
 			throw new IllegalStateException("setNewVertexValue should only be called at most once per updateVertex");
 		}
 		setNewVertexValueCalled = true;
-		if(isOptDegrees()) {
+		if (isOptDegrees()) {
 			outValWithDegrees.f1.f0 = newValue;
 			outWithDegrees.collect(outValWithDegrees);
 		} else {
@@ -126,7 +126,7 @@ public abstract class GatherFunction<K, VV, Message> implements Serializable {
 
 	/**
 	 * Gets the number of the superstep, starting at <tt>1</tt>.
-	 * 
+	 *
 	 * @return The number of the current superstep.
 	 */
 	public int getSuperstepNumber() {
@@ -136,7 +136,7 @@ public abstract class GatherFunction<K, VV, Message> implements Serializable {
 	/**
 	 * Gets the iteration aggregator registered under the given name. The iteration aggregator combines
 	 * all aggregates globally once per superstep and makes them available in the next superstep.
-	 * 
+	 *
 	 * @param name The name of the aggregator.
 	 * @return The aggregator registered under this name, or null, if no aggregator was registered.
 	 */
@@ -146,7 +146,7 @@ public abstract class GatherFunction<K, VV, Message> implements Serializable {
 
 	/**
 	 * Get the aggregated value that an aggregator computed in the previous iteration.
-	 * 
+	 *
 	 * @param name The name of the aggregator.
 	 * @return The aggregated value of the previous iteration.
 	 */
@@ -158,7 +158,7 @@ public abstract class GatherFunction<K, VV, Message> implements Serializable {
 	 * Gets the broadcast data set registered under the given name. Broadcast data sets
 	 * are available on all parallel instances of a function. They can be registered via
 	 * {@link org.apache.flink.graph.spargel.ScatterGatherConfiguration#addBroadcastSetForGatherFunction(String, org.apache.flink.api.java.DataSet)}.
-	 * 
+	 *
 	 * @param name The name under which the broadcast set is registered.
 	 * @return The broadcast data set.
 	 */
@@ -232,7 +232,7 @@ public abstract class GatherFunction<K, VV, Message> implements Serializable {
 	 * In order to hide the Tuple3(actualValue, inDegree, OutDegree) vertex value from the user,
 	 * another function will be called from {@link org.apache.flink.graph.spargel.ScatterGatherIteration}.
 	 *
-	 * This function will retrieve the vertex from the vertexState and will set its degrees, afterwards calling
+	 * <p>This function will retrieve the vertex from the vertexState and will set its degrees, afterwards calling
 	 * the regular updateVertex function.
 	 *
 	 * @param vertexState
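
For illustration, a shortest-paths style gather function (in the spirit of the Gelly documentation; key/value/message types assumed). It keeps the minimum distance received; only a changed value triggers the scatter phase:

import org.apache.flink.graph.Vertex;
import org.apache.flink.graph.spargel.GatherFunction;
import org.apache.flink.graph.spargel.MessageIterator;

public class VertexDistanceUpdater extends GatherFunction<Long, Double, Double> {

	@Override
	public void updateVertex(Vertex<Long, Double> vertex, MessageIterator<Double> inMessages) {
		double minDistance = Double.MAX_VALUE;
		for (Double msg : inMessages) {
			minDistance = Math.min(minDistance, msg);
		}
		if (vertex.getValue() > minDistance) {
			setNewVertexValue(minDistance);  // at most once per updateVertex
		}
	}
}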

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/MessageIterator.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/MessageIterator.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/MessageIterator.java
index d6fdc8a..be36954 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/MessageIterator.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/MessageIterator.java
@@ -18,10 +18,10 @@
 
 package org.apache.flink.graph.spargel;
 
-import java.util.Iterator;
-
 import org.apache.flink.api.java.tuple.Tuple2;
 
+import java.util.Iterator;
+
 /**
  * An iterator that returns messages. The iterator is {@link java.lang.Iterable} at the same time to support
  * the <i>foreach</i> syntax.
@@ -30,24 +30,23 @@ public final class MessageIterator<Message> implements Iterator<Message>, Iterab
 	private static final long serialVersionUID = 1L;
 
 	private transient Iterator<Tuple2<?, Message>> source;
-	
-	
-	final void setSource(Iterator<Tuple2<?, Message>> source) {
+
+	void setSource(Iterator<Tuple2<?, Message>> source) {
 		this.source = source;
 	}
-	
+
 	@Override
-	public final boolean hasNext() {
+	public boolean hasNext() {
 		return this.source.hasNext();
 	}
-	
+
 	@Override
-	public final Message next() {
+	public Message next() {
 		return this.source.next().f1;
 	}
 
 	@Override
-	public final void remove() {
+	public void remove() {
 		throw new UnsupportedOperationException();
 	}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/ScatterFunction.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/ScatterFunction.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/ScatterFunction.java
index b99b5b7..0ffc441 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/ScatterFunction.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/ScatterFunction.java
@@ -34,7 +34,7 @@ import java.util.Iterator;
 
 /**
  * The base class for functions that produce messages between vertices as a part of a {@link ScatterGatherIteration}.
- * 
+ *
  * @param <K> The type of the vertex key (the vertex identifier).
  * @param <VV> The type of the vertex value (the state of the vertex).
  * @param <Message> The type of the message sent between vertices along the edges.
@@ -90,23 +90,23 @@ public abstract class ScatterFunction<K, VV, Message, EV> implements Serializabl
 	/**
 	 * This method is invoked once per superstep for each vertex that was changed in that superstep.
 	 * It needs to produce the messages that will be received by vertices in the next superstep.
-	 * 
+	 *
 	 * @param vertex The vertex that was changed.
-	 * 
+	 *
 	 * @throws Exception The computation may throw exceptions, which causes the superstep to fail.
 	 */
 	public abstract void sendMessages(Vertex<K, VV> vertex) throws Exception;
 
 	/**
 	 * This method is executed once per superstep before the scatter function is invoked for each vertex.
-	 * 
+	 *
 	 * @throws Exception Exceptions in the pre-superstep phase cause the superstep to fail.
 	 */
 	public void preSuperstep() throws Exception {}
 
 	/**
 	 * This method is executed once per superstep after the scatter function has been invoked for each vertex.
-	 * 
+	 *
 	 * @throws Exception Exceptions in the post-superstep phase cause the superstep to fail.
 	 */
 	public void postSuperstep() throws Exception {}
@@ -115,11 +115,13 @@ public abstract class ScatterFunction<K, VV, Message, EV> implements Serializabl
 	/**
 	 * Gets an {@link java.lang.Iterable} with all edges. This method is mutually exclusive with
 	 * {@link #sendMessageToAllNeighbors(Object)} and may be called only once.
-	 * <p>
-	 * If the {@link EdgeDirection} is OUT (default), then this iterator contains outgoing edges.
-	 * If the {@link EdgeDirection} is IN, then this iterator contains incoming edges.
-	 * If the {@link EdgeDirection} is ALL, then this iterator contains both outgoing and incoming edges.
-	 * 
+	 *
+	 * <p>If the {@link EdgeDirection} is OUT (default), then this iterator contains outgoing edges.
+	 *
+	 * <p>If the {@link EdgeDirection} is IN, then this iterator contains incoming edges.
+	 *
+	 * <p>If the {@link EdgeDirection} is ALL, then this iterator contains both outgoing and incoming edges.
+	 *
 	 * @return An iterator with all edges.
 	 */
 	@SuppressWarnings("unchecked")
@@ -135,11 +137,13 @@ public abstract class ScatterFunction<K, VV, Message, EV> implements Serializabl
 	/**
 	 * Sends the given message to all vertices that are targets of an edge of the changed vertex.
 	 * This method is mutually exclusive to the method {@link #getEdges()} and may be called only once.
-	 * <p>
-	 * If the {@link EdgeDirection} is OUT (default), the message will be sent to out-neighbors.
-	 * If the {@link EdgeDirection} is IN, the message will be sent to in-neighbors.
-	 * If the {@link EdgeDirection} is ALL, the message will be sent to all neighbors.
-	 * 
+	 *
+	 * <p>If the {@link EdgeDirection} is OUT (default), the message will be sent to out-neighbors.
+	 *
+	 * <p>If the {@link EdgeDirection} is IN, the message will be sent to in-neighbors.
+	 *
+	 * <p>If the {@link EdgeDirection} is ALL, the message will be sent to all neighbors.
+	 *
 	 * @param m The message to send.
 	 */
 	public void sendMessageToAllNeighbors(Message m) {
@@ -155,16 +159,16 @@ public abstract class ScatterFunction<K, VV, Message, EV> implements Serializabl
 			Tuple next = (Tuple) edges.next();
 
 			/*
-			 * When EdgeDirection is OUT, the edges iterator only has the out-edges 
-			 * of the vertex, i.e. the ones where this vertex is src. 
+			 * When EdgeDirection is OUT, the edges iterator only has the out-edges
+			 * of the vertex, i.e. the ones where this vertex is src.
 			 * next.getField(1) gives the neighbor of the vertex running this ScatterFunction.
 			 */
 			if (getDirection().equals(EdgeDirection.OUT)) {
 				outValue.f0 = next.getField(1);
 			}
 			/*
-			 * When EdgeDirection is IN, the edges iterator only has the in-edges 
-			 * of the vertex, i.e. the ones where this vertex is trg. 
+			 * When EdgeDirection is IN, the edges iterator only has the in-edges
+			 * of the vertex, i.e. the ones where this vertex is trg.
 			 * next.getField(0) gives the neighbor of the vertex running this ScatterFunction.
 			 */
 			else if (getDirection().equals(EdgeDirection.IN)) {
@@ -188,7 +192,7 @@ public abstract class ScatterFunction<K, VV, Message, EV> implements Serializabl
 	/**
 	 * Sends the given message to the vertex identified by the given key. If the target vertex does not exist,
 	 * the next superstep will cause an exception due to a non-deliverable message.
-	 * 
+	 *
 	 * @param target The key (id) of the target vertex to message.
 	 * @param m The message.
 	 */
@@ -202,7 +206,7 @@ public abstract class ScatterFunction<K, VV, Message, EV> implements Serializabl
 
 	/**
 	 * Gets the number of the superstep, starting at <tt>1</tt>.
-	 * 
+	 *
 	 * @return The number of the current superstep.
 	 */
 	public int getSuperstepNumber() {
@@ -212,7 +216,7 @@ public abstract class ScatterFunction<K, VV, Message, EV> implements Serializabl
 	/**
 	 * Gets the iteration aggregator registered under the given name. The iteration aggregator combines
 	 * all aggregates globally once per superstep and makes them available in the next superstep.
-	 * 
+	 *
 	 * @param name The name of the aggregator.
 	 * @return The aggregator registered under this name, or null, if no aggregator was registered.
 	 */
@@ -222,7 +226,7 @@ public abstract class ScatterFunction<K, VV, Message, EV> implements Serializabl
 
 	/**
 	 * Get the aggregated value that an aggregator computed in the previous iteration.
-	 * 
+	 *
 	 * @param name The name of the aggregator.
 	 * @return The aggregated value of the previous iteration.
 	 */
@@ -234,7 +238,7 @@ public abstract class ScatterFunction<K, VV, Message, EV> implements Serializabl
 	 * Gets the broadcast data set registered under the given name. Broadcast data sets
 	 * are available on all parallel instances of a function. They can be registered via
 	 * {@link org.apache.flink.graph.spargel.ScatterGatherConfiguration#addBroadcastSetForScatterFunction(String, org.apache.flink.api.java.DataSet)}.
-	 * 
+	 *
 	 * @param name The name under which the broadcast set is registered.
 	 * @return The broadcast data set.
 	 */
@@ -277,9 +281,8 @@ public abstract class ScatterFunction<K, VV, Message, EV> implements Serializabl
 		this.edgesUsed = false;
 	}
 
-	private static final class EdgesIterator<K, EV> 
-		implements Iterator<Edge<K, EV>>, Iterable<Edge<K, EV>>
-	{
+	private static final class EdgesIterator<K, EV>
+		implements Iterator<Edge<K, EV>>, Iterable<Edge<K, EV>> {
 		private Iterator<Edge<K, EV>> input;
 
 		private Edge<K, EV> edge = new Edge<>();
@@ -306,6 +309,7 @@ public abstract class ScatterFunction<K, VV, Message, EV> implements Serializabl
 		public void remove() {
 			throw new UnsupportedOperationException();
 		}
+
 		@Override
 		public Iterator<Edge<K, EV>> iterator() {
 			return this;

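
And the matching scatter side of the same shortest-paths sketch (types assumed as in the gather function above):

import org.apache.flink.graph.Edge;
import org.apache.flink.graph.Vertex;
import org.apache.flink.graph.spargel.ScatterFunction;

public class MinDistanceMessenger extends ScatterFunction<Long, Double, Double, Double> {

	@Override
	public void sendMessages(Vertex<Long, Double> vertex) {
		// Offer each out-neighbor the current distance plus the edge weight.
		for (Edge<Long, Double> edge : getEdges()) {
			sendMessageTo(edge.getTarget(), vertex.getValue() + edge.getValue());
		}
	}
}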
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/ScatterGatherConfiguration.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/ScatterGatherConfiguration.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/ScatterGatherConfiguration.java
index 4ac1ae1..6a62847 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/ScatterGatherConfiguration.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/ScatterGatherConfiguration.java
@@ -31,23 +31,23 @@ import java.util.List;
  * degree of parallelism, to register aggregators and use broadcast sets in
  * the {@link GatherFunction} and {@link ScatterFunction}
  *
- * The VertexCentricConfiguration object is passed as an argument to
+ * <p>The ScatterGatherConfiguration object is passed as an argument to
  * {@link org.apache.flink.graph.Graph#runScatterGatherIteration (
  * org.apache.flink.graph.spargel.GatherFunction, org.apache.flink.graph.spargel.ScatterFunction, int,
  * ScatterGatherConfiguration)}.
  */
 public class ScatterGatherConfiguration extends IterationConfiguration {
 
-	/** the broadcast variables for the scatter function **/
+	// the broadcast variables for the scatter function
 	private List<Tuple2<String, DataSet<?>>> bcVarsScatter = new ArrayList<>();
 
-	/** the broadcast variables for the gather function **/
+	// the broadcast variables for the gather function
 	private List<Tuple2<String, DataSet<?>>> bcVarsGather = new ArrayList<>();
 
-	/** flag that defines whether the degrees option is set **/
+	// flag that defines whether the degrees option is set
 	private boolean optDegrees = false;
 
-	/** the direction in which the messages should be sent **/
+	// the direction in which the messages should be sent
 	private EdgeDirection direction = EdgeDirection.OUT;
 
 	public ScatterGatherConfiguration() {}
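
A short configuration sketch, assuming the MinDistanceMessenger and VertexDistanceUpdater classes from above, a Graph<Long, Double, Double> named graph, and an int maxIterations:

import org.apache.flink.graph.EdgeDirection;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.spargel.ScatterGatherConfiguration;

ScatterGatherConfiguration config = new ScatterGatherConfiguration();
config.setName("Single-source shortest paths");
config.setParallelism(16);
config.setOptDegrees(true);               // expose in/out degrees inside the UDFs
config.setDirection(EdgeDirection.OUT);   // the default; IN or ALL are also possible

Graph<Long, Double, Double> result = graph.runScatterGatherIteration(
		new MinDistanceMessenger(), new VertexDistanceUpdater(), maxIterations, config);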

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/ScatterGatherIteration.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/ScatterGatherIteration.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/ScatterGatherIteration.java
index 9f5585d..e3f01a3 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/ScatterGatherIteration.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/spargel/ScatterGatherIteration.java
@@ -49,14 +49,14 @@ import java.util.Map;
 /**
  * This class represents iterative graph computations, programmed in a scatter-gather perspective.
  * It is a special case of <i>Bulk Synchronous Parallel</i> computation.
- * <p>
- * Scatter-Gather algorithms operate on graphs, which are defined through vertices and edges. The 
+ *
+ * <p>Scatter-Gather algorithms operate on graphs, which are defined through vertices and edges. The
  * algorithms send messages along the edges and update the state of vertices based on
  * the old state and the incoming messages. All vertices have an initial state.
  * The computation terminates once no vertex updates its state any more.
  * Additionally, a maximum number of iterations (supersteps) may be specified.
- * <p>
- * The computation is here represented by two functions:
+ *
+ * <p>The computation is here represented by two functions:
  * <ul>
  *   <li>The {@link GatherFunction} receives incoming messages and may update the state for
  *   the vertex. If a state is updated, messages are sent from this vertex. Initially, all vertices are
@@ -64,9 +64,8 @@ import java.util.Map;
  *   <li>The {@link ScatterFunction} takes the new vertex state and sends messages along the outgoing
  *   edges of the vertex. The outgoing edges may optionally have an associated value, such as a weight.</li>
  * </ul>
- * <p>
  *
- * Scatter-Gather graph iterations are are run by calling
+ * <p>Scatter-Gather graph iterations are run by calling
  * {@link Graph#runScatterGatherIteration(ScatterFunction, GatherFunction, int)}.
  *
  * @param <K> The type of the vertex key (the vertex identifier).
@@ -74,9 +73,8 @@ import java.util.Map;
  * @param <Message> The type of the message sent between vertices along the edges.
  * @param <EV> The type of the values that are associated with the edges.
  */
-public class ScatterGatherIteration<K, VV, Message, EV> 
-	implements CustomUnaryOperation<Vertex<K, VV>, Vertex<K, VV>>
-{
+public class ScatterGatherIteration<K, VV, Message, EV>
+	implements CustomUnaryOperation<Vertex<K, VV>, Vertex<K, VV>> {
 	private final ScatterFunction<K, VV, Message, EV> scatterFunction;
 
 	private final GatherFunction<K, VV, Message> gatherFunction;
@@ -95,9 +93,8 @@ public class ScatterGatherIteration<K, VV, Message, EV>
 
 	private ScatterGatherIteration(ScatterFunction<K, VV, Message, EV> sf,
 			GatherFunction<K, VV, Message> gf,
-			DataSet<Edge<K, EV>> edgesWithValue, 
-			int maximumNumberOfIterations)
-	{
+			DataSet<Edge<K, EV>> edgesWithValue,
+			int maximumNumberOfIterations) {
 		Preconditions.checkNotNull(sf);
 		Preconditions.checkNotNull(gf);
 		Preconditions.checkNotNull(edgesWithValue);
@@ -121,10 +118,10 @@ public class ScatterGatherIteration<K, VV, Message, EV>
 	/**
 	 * Sets the input data set for this operator. In the case of this operator this input data set represents
 	 * the set of vertices with their initial state.
-	 * 
+	 *
 	 * @param inputData The input data set, which in the case of this operator represents the set of
 	 *                  vertices with their initial state.
-	 * 
+	 *
 	 * @see org.apache.flink.api.java.operators.CustomUnaryOperation#setInput(org.apache.flink.api.java.DataSet)
 	 */
 	@Override
@@ -134,7 +131,7 @@ public class ScatterGatherIteration<K, VV, Message, EV>
 
 	/**
 	 * Creates the operator that represents this scatter-gather graph computation.
-	 * 
+	 *
 	 * @return The operator that represents this scatter-gather graph computation.
 	 */
 	@Override
@@ -163,7 +160,7 @@ public class ScatterGatherIteration<K, VV, Message, EV>
 			}
 		}
 
-		if(this.configuration != null) {
+		if (this.configuration != null) {
 			scatterFunction.setDirection(this.configuration.getDirection());
 		} else {
 			scatterFunction.setDirection(EdgeDirection.OUT);
@@ -174,7 +171,7 @@ public class ScatterGatherIteration<K, VV, Message, EV>
 
 		// check whether the degrees option is set and, if so, compute the in and the out degrees and
 		// add them to the vertex value
-		if(this.configuration != null && this.configuration.isOptDegrees()) {
+		if (this.configuration != null && this.configuration.isOptDegrees()) {
 			return createResultVerticesWithDegrees(graph, messagingDirection, messageTypeInfo, numberOfVertices);
 		} else {
 			return createResultSimpleVertex(messagingDirection, messageTypeInfo, numberOfVertices);
@@ -184,7 +181,7 @@ public class ScatterGatherIteration<K, VV, Message, EV>
 	/**
 	 * Creates a new scatter-gather iteration operator for graphs where the edges are associated with a value (such as
 	 * a weight or distance).
-	 * 
+	 *
 	 * @param edgesWithValue The data set containing edges.
 	 * @param sf The function that turns changed vertex states into messages along the edges.
 	 * @param gf The function that updates the state of the vertices from the incoming messages.
@@ -193,13 +190,13 @@ public class ScatterGatherIteration<K, VV, Message, EV>
 	 * @param <VV> The type of the vertex value (the state of the vertex).
 	 * @param <Message> The type of the message sent between vertices along the edges.
 	 * @param <EV> The type of the values that are associated with the edges.
-	 * 
+	 *
 	 * @return An instance of the scatter-gather graph computation operator.
 	 */
 	public static <K, VV, Message, EV> ScatterGatherIteration<K, VV, Message, EV> withEdges(
-		DataSet<Edge<K, EV>> edgesWithValue, ScatterFunction<K, VV, Message, EV> sf,
-		GatherFunction<K, VV, Message> gf, int maximumNumberOfIterations)
-	{
+			DataSet<Edge<K, EV>> edgesWithValue, ScatterFunction<K, VV, Message, EV> sf,
+			GatherFunction<K, VV, Message> gf, int maximumNumberOfIterations) {
+
 		return new ScatterGatherIteration<>(sf, gf, edgesWithValue, maximumNumberOfIterations);
 	}
 
@@ -226,20 +223,18 @@ public class ScatterGatherIteration<K, VV, Message, EV>
 	/*
 	 * UDF that encapsulates the message sending function for graphs where the edges have an associated value.
 	 */
-	private static abstract class ScatterUdfWithEdgeValues<K, VVWithDegrees, VV, Message, EV>
+	private abstract static class ScatterUdfWithEdgeValues<K, VVWithDegrees, VV, Message, EV>
 			extends RichCoGroupFunction<Edge<K, EV>, Vertex<K, VVWithDegrees>, Tuple2<K, Message>>
-			implements ResultTypeQueryable<Tuple2<K, Message>>
-	{
+			implements ResultTypeQueryable<Tuple2<K, Message>> {
+
 		private static final long serialVersionUID = 1L;
 
 		final ScatterFunction<K, VV, Message, EV> scatterFunction;
 
 		private transient TypeInformation<Tuple2<K, Message>> resultType;
 
-
 		private ScatterUdfWithEdgeValues(ScatterFunction<K, VV, Message, EV> scatterFunction,
-				TypeInformation<Tuple2<K, Message>> resultType)
-		{
+				TypeInformation<Tuple2<K, Message>> resultType) {
 			this.scatterFunction = scatterFunction;
 			this.resultType = resultType;
 		}
@@ -322,10 +317,10 @@ public class ScatterGatherIteration<K, VV, Message, EV>
 		}
 	}
 
-	private static abstract class GatherUdf<K, VVWithDegrees, Message> extends RichCoGroupFunction<
-		Tuple2<K, Message>, Vertex<K, VVWithDegrees>, Vertex<K, VVWithDegrees>>
-		implements ResultTypeQueryable<Vertex<K, VVWithDegrees>>
-	{
+	private abstract static class GatherUdf<K, VVWithDegrees, Message> extends RichCoGroupFunction<
+			Tuple2<K, Message>, Vertex<K, VVWithDegrees>, Vertex<K, VVWithDegrees>>
+			implements ResultTypeQueryable<Vertex<K, VVWithDegrees>> {
+
 		private static final long serialVersionUID = 1L;
 
 		final GatherFunction<K, VVWithDegrees, Message> gatherFunction;
@@ -334,10 +329,9 @@ public class ScatterGatherIteration<K, VV, Message, EV>
 
 		private transient TypeInformation<Vertex<K, VVWithDegrees>> resultType;
 
-
 		private GatherUdf(GatherFunction<K, VVWithDegrees, Message> gatherFunction,
-				TypeInformation<Vertex<K, VVWithDegrees>> resultType)
-		{
+				TypeInformation<Vertex<K, VVWithDegrees>> resultType) {
+
 			this.gatherFunction = gatherFunction;
 			this.resultType = resultType;
 		}
@@ -477,7 +471,7 @@ public class ScatterGatherIteration<K, VV, Message, EV>
 
 		// configure coGroup message function with name and broadcast variables
 		messages = messages.name("Messaging");
-		if(this.configuration != null) {
+		if (this.configuration != null) {
 			for (Tuple2<String, DataSet<?>> e : this.configuration.getScatterBcastVars()) {
 				messages = messages.withBroadcastSet(e.f1, e.f0);
 			}
@@ -529,7 +523,7 @@ public class ScatterGatherIteration<K, VV, Message, EV>
 	}
 
 	/**
-	 * Helper method which sets up an iteration with the given vertex value(either simple or with degrees)
+	 * Helper method which sets up an iteration with the given vertex value (either simple or with degrees).
 	 *
 	 * @param iteration
 	 */
@@ -582,7 +576,7 @@ public class ScatterGatherIteration<K, VV, Message, EV>
 				break;
 			case ALL:
 				messages = buildScatterFunction(iteration, messageTypeInfo, 1, 0, numberOfVertices)
-						.union(buildScatterFunction(iteration, messageTypeInfo, 0, 0, numberOfVertices)) ;
+						.union(buildScatterFunction(iteration, messageTypeInfo, 0, 0, numberOfVertices));
 				break;
 			default:
 				throw new IllegalArgumentException("Illegal edge direction");
@@ -660,7 +654,7 @@ public class ScatterGatherIteration<K, VV, Message, EV>
 				break;
 			case ALL:
 				messages = buildScatterFunctionVerticesWithDegrees(iteration, messageTypeInfo, 1, 0, numberOfVertices)
-						.union(buildScatterFunctionVerticesWithDegrees(iteration, messageTypeInfo, 0, 0, numberOfVertices)) ;
+						.union(buildScatterFunctionVerticesWithDegrees(iteration, messageTypeInfo, 0, 0, numberOfVertices));
 				break;
 			default:
 				throw new IllegalArgumentException("Illegal edge direction");
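
The operator can also be applied directly, without going through Graph#runScatterGatherIteration; a sketch reusing the scatter and gather functions from above:

import org.apache.flink.api.java.DataSet;
import org.apache.flink.graph.Vertex;
import org.apache.flink.graph.spargel.ScatterGatherIteration;

ScatterGatherIteration<Long, Double, Double, Double> iteration =
		ScatterGatherIteration.withEdges(graph.getEdges(),
				new MinDistanceMessenger(), new VertexDistanceUpdater(), maxIterations);

DataSet<Vertex<Long, Double>> updatedVertices = graph.getVertices().runOperation(iteration);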

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/IntValueArray.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/IntValueArray.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/IntValueArray.java
index 0e3812d..c5b0fc8 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/IntValueArray.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/IntValueArray.java
@@ -21,7 +21,7 @@ package org.apache.flink.graph.types.valuearray;
 import org.apache.flink.core.memory.DataInputView;
 import org.apache.flink.core.memory.DataOutputView;
 import org.apache.flink.core.memory.MemorySegment;
-import org.apache.flink.graph.utils.Murmur3_32;
+import org.apache.flink.graph.utils.MurmurHash;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.util.Preconditions;
 
@@ -53,7 +53,7 @@ implements ValueArray<IntValue> {
 	private transient int mark;
 
 	// hasher used to generate the normalized key
-	private Murmur3_32 hash = new Murmur3_32(0x11d2d865);
+	private MurmurHash hash = new MurmurHash(0x11d2d865);
 
 	// hash result stored as normalized key
 	private IntValue hashValue = new IntValue();
@@ -120,7 +120,7 @@ implements ValueArray<IntValue> {
 	@Override
 	public String toString() {
 		StringBuilder sb = new StringBuilder("[");
-		for (int idx = 0 ; idx < this.position ; idx++) {
+		for (int idx = 0; idx < this.position; idx++) {
 			sb.append(data[idx]);
 			if (idx < position - 1) {
 				sb.append(",");
@@ -178,7 +178,7 @@ implements ValueArray<IntValue> {
 	public void write(DataOutputView out) throws IOException {
 		out.writeInt(position);
 
-		for (int i = 0 ; i < position ; i++) {
+		for (int i = 0; i < position; i++) {
 			out.writeInt(data[i]);
 		}
 	}
@@ -190,7 +190,7 @@ implements ValueArray<IntValue> {
 
 		ensureCapacity(position);
 
-		for (int i = 0 ; i < position ; i++) {
+		for (int i = 0; i < position; i++) {
 			data[i] = in.readInt();
 		}
 	}
@@ -209,7 +209,7 @@ implements ValueArray<IntValue> {
 		hash.reset();
 
 		hash.hash(position);
-		for (int i = 0 ; i < position ; i++) {
+		for (int i = 0; i < position; i++) {
 			hash.hash(data[i]);
 		}
 
@@ -226,7 +226,7 @@ implements ValueArray<IntValue> {
 		IntValueArray other = (IntValueArray) o;
 
 		int min = Math.min(position, other.position);
-		for (int i = 0 ; i < min ; i++) {
+		for (int i = 0; i < min; i++) {
 			int cmp = Integer.compare(data[i], other.data[i]);
 
 			if (cmp != 0) {
@@ -245,7 +245,7 @@ implements ValueArray<IntValue> {
 	public int hashCode() {
 		int hash = 1;
 
-		for (int i = 0 ; i < position ; i++) {
+		for (int i = 0; i < position; i++) {
 			hash = 31 * hash + data[i];
 		}
 
@@ -261,7 +261,7 @@ implements ValueArray<IntValue> {
 				return false;
 			}
 
-			for (int i = 0 ; i < position ; i++) {
+			for (int i = 0; i < position; i++) {
 				if (data[i] != other.data[i]) {
 					return false;
 				}
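
A usage sketch of the array API (per the ValueArray contract; note that iteration may reuse a single IntValue instance, so copy any value that must be retained):

import org.apache.flink.graph.types.valuearray.IntValueArray;
import org.apache.flink.types.IntValue;

IntValueArray array = new IntValueArray();  // unbounded; a bounded variant takes a byte budget
array.add(new IntValue(7));
array.add(new IntValue(11));

for (IntValue value : array) {
	System.out.println(value.getValue());
}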

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/IntValueArrayComparator.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/IntValueArrayComparator.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/IntValueArrayComparator.java
index bbc9bc5..7e1a6dc 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/IntValueArrayComparator.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/IntValueArrayComparator.java
@@ -30,7 +30,7 @@ import java.io.IOException;
 /**
  * Specialized comparator for IntValueArray based on CopyableValueComparator.
  *
- * This can be used for grouping keys but not for sorting keys.
+ * <p>This can be used for grouping keys but not for sorting keys.
  */
 @Internal
 public class IntValueArrayComparator extends TypeComparator<IntValueArray> {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/LongValueArray.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/LongValueArray.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/LongValueArray.java
index 7c01e6c..b3b4a79 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/LongValueArray.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/LongValueArray.java
@@ -21,7 +21,7 @@ package org.apache.flink.graph.types.valuearray;
 import org.apache.flink.core.memory.DataInputView;
 import org.apache.flink.core.memory.DataOutputView;
 import org.apache.flink.core.memory.MemorySegment;
-import org.apache.flink.graph.utils.Murmur3_32;
+import org.apache.flink.graph.utils.MurmurHash;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.util.Preconditions;
@@ -54,7 +54,7 @@ implements ValueArray<LongValue> {
 	private transient int mark;
 
 	// hasher used to generate the normalized key
-	private Murmur3_32 hash = new Murmur3_32(0xdf099ea8);
+	private MurmurHash hash = new MurmurHash(0xdf099ea8);
 
 	// hash result stored as normalized key
 	private IntValue hashValue = new IntValue();
@@ -121,7 +121,7 @@ implements ValueArray<LongValue> {
 	@Override
 	public String toString() {
 		StringBuilder sb = new StringBuilder("[");
-		for (int idx = 0 ; idx < this.position ; idx++) {
+		for (int idx = 0; idx < this.position; idx++) {
 			sb.append(data[idx]);
 			if (idx < position - 1) {
 				sb.append(",");
@@ -179,7 +179,7 @@ implements ValueArray<LongValue> {
 	public void write(DataOutputView out) throws IOException {
 		out.writeInt(position);
 
-		for (int i = 0 ; i < position ; i++) {
+		for (int i = 0; i < position; i++) {
 			out.writeLong(data[i]);
 		}
 	}
@@ -191,7 +191,7 @@ implements ValueArray<LongValue> {
 
 		ensureCapacity(position);
 
-		for (int i = 0 ; i < position ; i++) {
+		for (int i = 0; i < position; i++) {
 			data[i] = in.readLong();
 		}
 	}
@@ -210,7 +210,7 @@ implements ValueArray<LongValue> {
 		hash.reset();
 
 		hash.hash(position);
-		for (int i = 0 ; i < position ; i++) {
+		for (int i = 0; i < position; i++) {
 			hash.hash(data[i]);
 		}
 
@@ -227,7 +227,7 @@ implements ValueArray<LongValue> {
 		LongValueArray other = (LongValueArray) o;
 
 		int min = Math.min(position, other.position);
-		for (int i = 0 ; i < min ; i++) {
+		for (int i = 0; i < min; i++) {
 			int cmp = Long.compare(data[i], other.data[i]);
 
 			if (cmp != 0) {
@@ -246,7 +246,7 @@ implements ValueArray<LongValue> {
 	public int hashCode() {
 		int hash = 1;
 
-		for (int i = 0 ; i < position ; i++) {
+		for (int i = 0; i < position; i++) {
 			hash = 31 * hash + (int) (data[i] ^ data[i] >>> 32);
 		}
 
@@ -262,7 +262,7 @@ implements ValueArray<LongValue> {
 				return false;
 			}
 
-			for (int i = 0 ; i < position ; i++) {
+			for (int i = 0; i < position; i++) {
 				if (data[i] != other.data[i]) {
 					return false;
 				}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/LongValueArrayComparator.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/LongValueArrayComparator.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/LongValueArrayComparator.java
index 26c3da2..278b1a1 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/LongValueArrayComparator.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/LongValueArrayComparator.java
@@ -30,7 +30,7 @@ import java.io.IOException;
 /**
  * Specialized comparator for LongValueArray based on CopyableValueComparator.
  *
- * This can be used for grouping keys but not for sorting keys.
+ * <p>This can be used for grouping keys but not for sorting keys.
  */
 @Internal
 public class LongValueArrayComparator extends TypeComparator<LongValueArray> {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/NullValueArray.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/NullValueArray.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/NullValueArray.java
index bf247a2..6581550 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/NullValueArray.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/NullValueArray.java
@@ -62,7 +62,7 @@ implements ValueArray<NullValue> {
 	@Override
 	public String toString() {
 		StringBuilder sb = new StringBuilder("[");
-		for (int idx = 0 ; idx < this.position ; idx++) {
+		for (int idx = 0; idx < this.position; idx++) {
 			sb.append("∅");
 			if (idx < position - 1) {
 				sb.append(",");

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/NullValueArrayComparator.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/NullValueArrayComparator.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/NullValueArrayComparator.java
index 2228d6e..6d28cc6 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/NullValueArrayComparator.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/NullValueArrayComparator.java
@@ -30,7 +30,7 @@ import java.io.IOException;
 /**
  * Specialized comparator for NullValueArray based on CopyableValueComparator.
  *
- * This can be used for grouping keys but not for sorting keys.
+ * <p>This can be used for grouping keys but not for sorting keys.
  */
 @Internal
 public class NullValueArrayComparator extends TypeComparator<NullValueArray> {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/StringValueArray.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/StringValueArray.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/StringValueArray.java
index 4699552..fabe990 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/StringValueArray.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/StringValueArray.java
@@ -21,7 +21,7 @@ package org.apache.flink.graph.types.valuearray;
 import org.apache.flink.core.memory.DataInputView;
 import org.apache.flink.core.memory.DataOutputView;
 import org.apache.flink.core.memory.MemorySegment;
-import org.apache.flink.graph.utils.Murmur3_32;
+import org.apache.flink.graph.utils.MurmurHash;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.StringValue;
 import org.apache.flink.util.Preconditions;
@@ -33,12 +33,12 @@ import java.util.Iterator;
 
 /**
  * An array of {@link StringValue}.
- * <p>
- * Strings are serialized to a byte array. Concatenating arrays is as simple
+ *
+ * <p>Strings are serialized to a byte array. Concatenating arrays is as simple
  * and fast as extending and copying byte arrays. Strings are serialized when
  * individually added to {@code StringValueArray}.
- * <p>
- * For each string added to the array the length is first serialized using a
+ *
+ * <p>For each string added to the array the length is first serialized using a
  * variable length integer. Then the string characters are serialized using a
  * variable length encoding where the lower 128 ASCII/UTF-8 characters are
  * encoded in a single byte. This ensures that common characters are serialized
@@ -74,7 +74,7 @@ implements ValueArray<StringValue> {
 	private transient int markPosition;
 
 	// hasher used to generate the normalized key
-	private Murmur3_32 hash = new Murmur3_32(0x19264330);
+	private MurmurHash hash = new MurmurHash(0x19264330);
 
 	// hash result stored as normalized key
 	private IntValue hashValue = new IntValue();
@@ -276,7 +276,7 @@ implements ValueArray<StringValue> {
 		hash.reset();
 
 		hash.hash(position);
-		for (int i = 0 ; i < position ; i++) {
+		for (int i = 0; i < position; i++) {
 			hash.hash(data[i]);
 		}
 
@@ -300,7 +300,7 @@ implements ValueArray<StringValue> {
 			return cmp;
 		}
 
-		for (int i = 0 ; i < position ; i++) {
+		for (int i = 0; i < position; i++) {
 			cmp = Byte.compare(data[i], other.data[i]);
 
 			if (cmp != 0) {
@@ -319,7 +319,7 @@ implements ValueArray<StringValue> {
 	public int hashCode() {
 		int hash = 1;
 
-		for (int i = 0 ; i < position ; i++) {
+		for (int i = 0; i < position; i++) {
 			hash = 31 * hash + data[i];
 		}
 
@@ -339,7 +339,7 @@ implements ValueArray<StringValue> {
 				return false;
 			}
 
-			for (int i = 0 ; i < position ; i++) {
+			for (int i = 0; i < position; i++) {
 				if (data[i] != other.data[i]) {
 					return false;
 				}
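
To illustrate the variable-length encoding described above (a sketch of the general idea, not this class's exact byte layout): seven data bits per byte, with the high bit marking that more bytes follow, so small lengths and common characters need only a single byte.

class VarLengthSketch {

	// Writes 'value' into buf starting at offset; returns the new offset.
	static int writeVarLengthInt(byte[] buf, int offset, int value) {
		while ((value & ~0x7f) != 0) {
			buf[offset++] = (byte) ((value & 0x7f) | 0x80);  // high bit set: more bytes follow
			value >>>= 7;
		}
		buf[offset++] = (byte) value;  // final byte has the high bit clear
		return offset;
	}
}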

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/StringValueArrayComparator.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/StringValueArrayComparator.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/StringValueArrayComparator.java
index df88a8e..9d1c0f7 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/StringValueArrayComparator.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/StringValueArrayComparator.java
@@ -32,7 +32,7 @@ import static org.apache.flink.graph.types.valuearray.StringValueArray.HIGH_BIT;
 /**
  * Specialized comparator for StringValueArray based on CopyableValueComparator.
  *
- * This can be used for grouping keys but not for sorting keys.
+ * <p>This can be used for grouping keys but not for sorting keys.
  */
 @Internal
 public class StringValueArrayComparator extends TypeComparator<StringValueArray> {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/ValueArray.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/ValueArray.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/ValueArray.java
index 6e34b71..35ebd6b 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/ValueArray.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/ValueArray.java
@@ -30,7 +30,7 @@ import java.io.Serializable;
 /**
  * Basic interface for array types which reuse objects during serialization.
  *
- * Value arrays are usable as grouping keys but not sorting keys.
+ * <p>Value arrays are usable as grouping keys but not sorting keys.
  *
  * @param <T> the {@link Value} type
  */
@@ -76,7 +76,7 @@ extends Iterable<T>, IOReadableWritable, Serializable, NormalizableKey<ValueArra
 	/**
 	 * Saves the array index, which can be restored by calling {@code reset()}.
 	 *
-	 * This is not serialized and is not part of the contract for
+	 * <p>This is not serialized and is not part of the contract for
 	 * {@link #equals(Object)}.
 	 */
 	void mark();
@@ -91,7 +91,7 @@ extends Iterable<T>, IOReadableWritable, Serializable, NormalizableKey<ValueArra
 	 * expected to release the underlying data structure. This allows the array
 	 * to be reused with minimal impact on the garbage collector.
 	 *
-	 * This may reset the {@link #mark()} in order to allow arrays be shrunk.
+	 * <p>This may reset the {@link #mark()} in order to allow arrays be shrunk.
 	 */
 	void clear();
 }

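To make the mark()/reset() contract above concrete, a minimal usage sketch, assuming IntValueArray from the same package and the add(IntValue) method declared on ValueArray:

	ValueArray<IntValue> array = new IntValueArray();
	array.add(new IntValue(1));
	array.mark();               // remember the current array index
	array.add(new IntValue(2));
	array.reset();              // roll back to the mark; the second element is discarded

As the javadoc notes, the mark is transient: it is neither serialized nor considered by equals(Object).
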
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/ValueArrayFactory.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/ValueArrayFactory.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/ValueArrayFactory.java
index b7b6282..2426550 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/ValueArrayFactory.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/types/valuearray/ValueArrayFactory.java
@@ -31,7 +31,7 @@ import org.apache.flink.types.Value;
  * for creating a {@link ValueArray}. Algorithms must instantiate classes at
  * runtime when the type information has been erased.
  *
- * This mirrors creating {@link Value} using {@link CopyableValue#copy()}.
+ * <p>This mirrors creating {@link Value} using {@link CopyableValue#copy()}.
  */
 public class ValueArrayFactory {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/EdgeToTuple2Map.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/EdgeToTuple2Map.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/EdgeToTuple2Map.java
index 1e500ea..0a9afa2 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/EdgeToTuple2Map.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/EdgeToTuple2Map.java
@@ -24,7 +24,7 @@ import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.graph.Edge;
 
 /**
- * Create a Tuple2 DataSet from the vertices of an Edge DataSet
+ * Create a Tuple2 DataSet from the vertices of an Edge DataSet.
  *
  * @param <K> edge ID type
  * @param <EV> edge value type

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/EdgeToTuple3Map.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/EdgeToTuple3Map.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/EdgeToTuple3Map.java
index a050ee8..3b65933 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/EdgeToTuple3Map.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/EdgeToTuple3Map.java
@@ -24,7 +24,7 @@ import org.apache.flink.api.java.tuple.Tuple3;
 import org.apache.flink.graph.Edge;
 
 /**
- * Create a Tuple3 DataSet from an Edge DataSet
+ * Create a Tuple3 DataSet from an Edge DataSet.
  *
  * @param <K> edge ID type
  * @param <EV> edge value type

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/GraphUtils.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/GraphUtils.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/GraphUtils.java
index 5292751..3e2ac23 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/GraphUtils.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/GraphUtils.java
@@ -30,8 +30,13 @@ import org.apache.flink.types.LongValue;
 
 import static org.apache.flink.api.java.typeutils.ValueTypeInfo.LONG_VALUE_TYPE_INFO;
 
+/**
+ * {@link Graph} utilities.
+ */
 public class GraphUtils {
 
+	private GraphUtils() {}
+
 	/**
 	 * Count the number of elements in a DataSet.
 	 *
@@ -64,7 +69,7 @@ public class GraphUtils {
 	/**
 	 * The identity mapper returns the input as output.
 	 *
-	 * This does not forward fields and is used to break an operator chain.
+	 * <p>This does not forward fields and is used to break an operator chain.
 	 *
 	 * @param <T> element type
 	 */
@@ -107,7 +112,7 @@ public class GraphUtils {
 
 		@Override
 		public TypeInformation<O> getProducedType() {
-			return (TypeInformation<O>)TypeExtractor.createTypeInfo(value.getClass());
+			return (TypeInformation<O>) TypeExtractor.createTypeInfo(value.getClass());
 		}
 	}
 

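The identity mapper described above is easy to picture; here is a hedged, standalone sketch of the idea (not Gelly's actual inner class): a map that returns its input and deliberately carries no forwarded-fields annotation, so it forms a real operator boundary rather than being chained away.

	import org.apache.flink.api.common.functions.MapFunction;

	// Illustrative identity mapper; no forwarded-fields info is declared.
	public class IdentityMapper<T> implements MapFunction<T, T> {
		@Override
		public T map(T value) {
			return value; // input passed through unchanged
		}
	}
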
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/Murmur3_32.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/Murmur3_32.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/Murmur3_32.java
deleted file mode 100644
index f48feb2..0000000
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/Murmur3_32.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.graph.utils;
-
-import java.io.Serializable;
-
-/**
- * A resettable implementation of the 32-bit MurmurHash algorithm.
- */
-public class Murmur3_32 implements Serializable {
-
-	private static final long serialVersionUID = 1L;
-
-	// initial seed, which can be reset
-	private final int seed;
-
-	// number of 32-bit values processed
-	private int count;
-
-	// in-progress hash value
-	private int hash;
-
-	/**
-	 * A resettable implementation of the 32-bit MurmurHash algorithm.
-	 *
-	 * @param seed MurmurHash seed
-	 */
-	public Murmur3_32(int seed) {
-		this.seed = seed;
-		reset();
-	}
-
-	/**
-	 * Re-initialize the MurmurHash state.
-	 *
-	 * @return this
-	 */
-	public Murmur3_32 reset() {
-		count = 0;
-		hash = seed;
-		return this;
-	}
-
-	/**
-	 * Process a {@code double} value.
-	 *
-	 * @param input 64-bit input value
-	 * @return this
-	 */
-	public Murmur3_32 hash(double input) {
-		hash(Double.doubleToLongBits(input));
-		return this;
-	}
-
-	/**
-	 * Process a {@code float} value.
-	 *
-	 * @param input 32-bit input value
-	 * @return this
-	 */
-	public Murmur3_32 hash(float input) {
-		hash(Float.floatToIntBits(input));
-		return this;
-	}
-
-	/**
-	 * Process an {@code integer} value.
-	 *
-	 * @param input 32-bit input value
-	 * @return this
-	 */
-	public Murmur3_32 hash(int input) {
-		count++;
-
-		input *= 0xcc9e2d51;
-		input = Integer.rotateLeft(input, 15);
-		input *= 0x1b873593;
-
-		hash ^= input;
-		hash = Integer.rotateLeft(hash, 13);
-		hash = hash * 5 + 0xe6546b64;
-
-		return this;
-	}
-
-	/**
-	 * Process a {@code long} value.
-	 *
-	 * @param input 64-bit input value
-	 * @return this
-	 */
-	public Murmur3_32 hash(long input) {
-		hash((int)(input >>> 32));
-		hash((int)input);
-		return this;
-	}
-
-	/**
-	 * Finalize and return the MurmurHash output.
-	 *
-	 * @return 32-bit hash
-	 */
-	public int hash() {
-		hash ^= 4 * count;
-		hash ^= hash >>> 16;
-		hash *= 0x85ebca6b;
-		hash ^= hash >>> 13;
-		hash *= 0xc2b2ae35;
-		hash ^= hash >>> 16;
-
-		return hash;
-	}
-}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/MurmurHash.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/MurmurHash.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/MurmurHash.java
new file mode 100644
index 0000000..484e793
--- /dev/null
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/MurmurHash.java
@@ -0,0 +1,129 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.graph.utils;
+
+import java.io.Serializable;
+
+/**
+ * A resettable implementation of the 32-bit MurmurHash algorithm.
+ */
+public class MurmurHash implements Serializable {
+
+	private static final long serialVersionUID = 1L;
+
+	// initial seed, which can be reset
+	private final int seed;
+
+	// number of 32-bit values processed
+	private int count;
+
+	// in-progress hash value
+	private int hash;
+
+	/**
+	 * A resettable implementation of the 32-bit MurmurHash algorithm.
+	 *
+	 * @param seed MurmurHash seed
+	 */
+	public MurmurHash(int seed) {
+		this.seed = seed;
+		reset();
+	}
+
+	/**
+	 * Re-initialize the MurmurHash state.
+	 *
+	 * @return this
+	 */
+	public MurmurHash reset() {
+		count = 0;
+		hash = seed;
+		return this;
+	}
+
+	/**
+	 * Process a {@code double} value.
+	 *
+	 * @param input 64-bit input value
+	 * @return this
+	 */
+	public MurmurHash hash(double input) {
+		hash(Double.doubleToLongBits(input));
+		return this;
+	}
+
+	/**
+	 * Process a {@code float} value.
+	 *
+	 * @param input 32-bit input value
+	 * @return this
+	 */
+	public MurmurHash hash(float input) {
+		hash(Float.floatToIntBits(input));
+		return this;
+	}
+
+	/**
+	 * Process an {@code integer} value.
+	 *
+	 * @param input 32-bit input value
+	 * @return this
+	 */
+	public MurmurHash hash(int input) {
+		count++;
+
+		input *= 0xcc9e2d51;
+		input = Integer.rotateLeft(input, 15);
+		input *= 0x1b873593;
+
+		hash ^= input;
+		hash = Integer.rotateLeft(hash, 13);
+		hash = hash * 5 + 0xe6546b64;
+
+		return this;
+	}
+
+	/**
+	 * Process a {@code long} value.
+	 *
+	 * @param input 64-bit input value
+	 * @return this
+	 */
+	public MurmurHash hash(long input) {
+		hash((int) (input >>> 32));
+		hash((int) input);
+		return this;
+	}
+
+	/**
+	 * Finalize and return the MurmurHash output.
+	 *
+	 * @return 32-bit hash
+	 */
+	public int hash() {
+		hash ^= 4 * count;
+		hash ^= hash >>> 16;
+		hash *= 0x85ebca6b;
+		hash ^= hash >>> 13;
+		hash *= 0xc2b2ae35;
+		hash ^= hash >>> 16;
+
+		return hash;
+	}
+}

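For orientation, a minimal usage sketch of the renamed hasher (the wrapper class here is hypothetical; the seed mirrors the one StringValueArray passes in this commit):

	import org.apache.flink.graph.utils.MurmurHash;

	public class MurmurHashExample {
		public static void main(String[] args) {
			MurmurHash murmur = new MurmurHash(0x19264330);

			// hash a count followed by the values, then finalize; each hash(x)
			// returns the hasher itself so calls chain, and reset() allows reuse
			int h = murmur.reset().hash(3).hash(10).hash(20).hash(30).hash();
			System.out.println(Integer.toHexString(h));
		}
	}
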
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/Tuple2ToEdgeMap.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/Tuple2ToEdgeMap.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/Tuple2ToEdgeMap.java
index 5eb8287..23d7dd4 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/Tuple2ToEdgeMap.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/Tuple2ToEdgeMap.java
@@ -27,7 +27,7 @@ import org.apache.flink.types.NullValue;
 /**
  * Create an Edge from a Tuple2.
  *
- * The new edge's value is set to {@link NullValue}.
+ * <p>The new edge's value is set to {@link NullValue}.
  *
  * @param <K> edge ID type
  */

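The mapping is essentially a one-liner; a hedged sketch of its core (standalone, assuming Gelly's Edge constructor and the NullValue singleton):

	// A pair of ids becomes an Edge whose value is NullValue, as the javadoc states.
	public Edge<K, NullValue> map(Tuple2<K, K> pair) {
		return new Edge<>(pair.f0, pair.f1, NullValue.getInstance());
	}
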
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/Tuple2ToVertexMap.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/Tuple2ToVertexMap.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/Tuple2ToVertexMap.java
index 636ed7b..71639a7 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/Tuple2ToVertexMap.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/Tuple2ToVertexMap.java
@@ -24,7 +24,7 @@ import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.graph.Vertex;
 
 /**
- * Create a Vertex DataSet from a Tuple2 DataSet
+ * Create a Vertex DataSet from a Tuple2 DataSet.
  *
  * @param <K> vertex ID type
  * @param <VV> vertex value type

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/Tuple3ToEdgeMap.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/Tuple3ToEdgeMap.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/Tuple3ToEdgeMap.java
index 8ea54b4..4416e40 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/Tuple3ToEdgeMap.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/Tuple3ToEdgeMap.java
@@ -24,7 +24,7 @@ import org.apache.flink.api.java.tuple.Tuple3;
 import org.apache.flink.graph.Edge;
 
 /**
- * Create an Edge DataSet from a Tuple3 DataSet
+ * Create an Edge DataSet from a Tuple3 DataSet.
  *
  * @param <K> edge ID type
  * @param <EV> edge value type

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/VertexToTuple2Map.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/VertexToTuple2Map.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/VertexToTuple2Map.java
index b1f996c..2a0310e 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/VertexToTuple2Map.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/VertexToTuple2Map.java
@@ -24,7 +24,7 @@ import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.graph.Vertex;
 
 /**
- * Create a Tuple2 DataSet from a Vertex DataSet
+ * Create a Tuple2 DataSet from a Vertex DataSet.
  *
  * @param <K> vertex ID type
  * @param <VV> vertex value type

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/proxy/GraphAlgorithmWrappingDataSet.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/proxy/GraphAlgorithmWrappingDataSet.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/proxy/GraphAlgorithmWrappingDataSet.java
index 11e7a64..838c021 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/proxy/GraphAlgorithmWrappingDataSet.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/proxy/GraphAlgorithmWrappingDataSet.java
@@ -18,13 +18,14 @@
 
 package org.apache.flink.graph.utils.proxy;
 
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.operators.NoOpOperator;
 import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.GraphAlgorithm;
 
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -101,7 +102,7 @@ implements GraphAlgorithm<K, VV, EV, DataSet<T>> {
 			return true;
 		}
 
-		if (! GraphAlgorithmWrappingDataSet.class.isAssignableFrom(obj.getClass())) {
+		if (!GraphAlgorithmWrappingDataSet.class.isAssignableFrom(obj.getClass())) {
 			return false;
 		}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/proxy/GraphAlgorithmWrappingGraph.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/proxy/GraphAlgorithmWrappingGraph.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/proxy/GraphAlgorithmWrappingGraph.java
index 69a6c37..c2aba4e 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/proxy/GraphAlgorithmWrappingGraph.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/proxy/GraphAlgorithmWrappingGraph.java
@@ -18,8 +18,6 @@
 
 package org.apache.flink.graph.utils.proxy;
 
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.operators.NoOpOperator;
 import org.apache.flink.graph.Edge;
@@ -27,6 +25,9 @@ import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.GraphAlgorithm;
 import org.apache.flink.graph.Vertex;
 
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -107,7 +108,7 @@ implements GraphAlgorithm<IN_K, IN_VV, IN_EV, Graph<OUT_K, OUT_VV, OUT_EV>> {
 			return true;
 		}
 
-		if (! GraphAlgorithmWrappingGraph.class.isAssignableFrom(obj.getClass())) {
+		if (!GraphAlgorithmWrappingGraph.class.isAssignableFrom(obj.getClass())) {
 			return false;
 		}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/proxy/OptionalBoolean.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/proxy/OptionalBoolean.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/proxy/OptionalBoolean.java
index 7a7208a..e672434 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/proxy/OptionalBoolean.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/utils/proxy/OptionalBoolean.java
@@ -22,13 +22,16 @@ import org.apache.flink.graph.GraphAlgorithm;
 
 /**
  * A multi-state boolean.
- * <p>
- * This class is used by {@link GraphAlgorithm} configuration options to set a
+ *
+ * <p>This class is used by {@link GraphAlgorithm} configuration options to set a
  * default value which can be overwritten. The default value is also used when
  * algorithm configurations are merged and conflict.
  */
 public class OptionalBoolean {
 
+	/**
+	 * States for {@link OptionalBoolean}.
+	 */
 	protected enum State {
 		UNSET,
 		FALSE,
@@ -114,10 +117,10 @@ public class OptionalBoolean {
 	}
 
 	/**
-	 * State transitions:
-	 *  if the states are the same then no change
-	 *  if either state is unset then change to the other state
-	 *  if the states are conflicting then set to the conflicting state
+	 * State transitions.
+	 * - if the states are the same then no change
+	 * - if either state is unset then change to the other state
+	 * - if the states are conflicting then set to the conflicting state
 	 *
 	 * @param other object from which to merge state
 	 */

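The transition rules above read as a small truth table; a hedged, standalone sketch of the same merge logic, using the State enum introduced in this hunk (assuming it also carries a CONFLICTING constant, which the excerpt truncates):

	static State merge(State self, State other) {
		if (self == other) {
			return self;              // same states: no change
		} else if (self == State.UNSET) {
			return other;             // either state unset: adopt the other
		} else if (other == State.UNSET) {
			return self;
		} else {
			return State.CONFLICTING; // TRUE vs FALSE (or CONFLICTING): conflict wins
		}
	}
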
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/validation/GraphValidator.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/validation/GraphValidator.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/validation/GraphValidator.java
index 75b672c..44635ca 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/validation/GraphValidator.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/validation/GraphValidator.java
@@ -18,13 +18,13 @@
 
 package org.apache.flink.graph.validation;
 
-import java.io.Serializable;
-
 import org.apache.flink.graph.Graph;
 
+import java.io.Serializable;
+
 /**
  * A utility for defining validation criteria for different types of Graphs.
- * 
+ *
  * @param <K> the vertex key type
  * @param <VV> the vertex value type
  * @param <EV> the edge value type
@@ -34,4 +34,4 @@ public abstract class GraphValidator<K, VV, EV>	implements Serializable {
 
 	public abstract boolean validate(Graph<K, VV, EV> graph) throws Exception;
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/validation/InvalidVertexIdsValidator.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/validation/InvalidVertexIdsValidator.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/validation/InvalidVertexIdsValidator.java
index b620dd8..57aa987 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/validation/InvalidVertexIdsValidator.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/validation/InvalidVertexIdsValidator.java
@@ -28,13 +28,20 @@ import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.util.Collector;
 
+/**
+ * Validate that the edge set vertex IDs exist in the vertex set.
+ *
+ * @param <K> graph ID type
+ * @param <VV> vertex value type
+ * @param <EV> edge value type
+ */
 @SuppressWarnings("serial")
 public class InvalidVertexIdsValidator<K, VV, EV> extends GraphValidator<K, VV, EV> {
 
 	/**
 	 * Checks that the edge set input contains valid vertex Ids, i.e. that they
 	 * also exist in the vertex input set.
-	 * 
+	 *
 	 * @return a boolean stating whether a graph is valid
 	 *         with respect to its vertex ids.
 	 */
@@ -71,4 +78,4 @@ public class InvalidVertexIdsValidator<K, VV, EV> extends GraphValidator<K, VV,
 		}
 	}
 
-}
\ No newline at end of file
+}

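To spell out the check the new javadoc describes, a hedged sketch with plain collections (illustrative only; the validator itself does the equivalent with DataSet transformations):

	import java.util.List;
	import java.util.Set;

	import org.apache.flink.graph.Edge;

	// Every edge endpoint must appear in the vertex id set.
	static <K> boolean hasValidVertexIds(Set<K> vertexIds, List<Edge<K, ?>> edges) {
		for (Edge<K, ?> edge : edges) {
			if (!vertexIds.contains(edge.getSource()) || !vertexIds.contains(edge.getTarget())) {
				return false; // edge references an id missing from the vertex set
			}
		}
		return true;
	}
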
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/AsmTestBase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/AsmTestBase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/AsmTestBase.java
index 469a23f..f89d4f5 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/AsmTestBase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/AsmTestBase.java
@@ -28,16 +28,20 @@ import org.apache.flink.graph.generator.random.JDKRandomGeneratorFactory;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Before;
 
 import java.util.LinkedList;
 import java.util.List;
 
+/**
+ * Simple graphs for testing graph assembly functions.
+ */
 public class AsmTestBase {
 
 	protected ExecutionEnvironment env;
 
-	protected final double ACCURACY = 0.000001;
+	protected static final double ACCURACY = 0.000001;
 
 	// simple graph
 	protected Graph<IntValue, NullValue, NullValue> directedSimpleGraph;
@@ -95,12 +99,14 @@ public class AsmTestBase {
 	 * scale=10 and edgeFactor=16 but algorithms generating very large DataSets
 	 * require smaller input graphs.
 	 *
-	 * The examples program can write this graph as a CSV file for verifying
+	 * <p>The examples program can write this graph as a CSV file for verifying
 	 * algorithm results with external libraries:
 	 *
+	 * <pre>
 	 * ./bin/flink run examples/flink-gelly-examples_*.jar --algorithm EdgeList \
 	 *     --input RMatGraph --type long --simplify directed --scale $SCALE --edge_factor $EDGE_FACTOR \
 	 *     --output csv --filename directedRMatGraph.csv
+	 * </pre>
 	 *
 	 * @param scale vertices are generated in the range [0, 2<sup>scale</sup>)
 	 * @param edgeFactor the edge count is {@code edgeFactor} * 2<sup>scale</sup>
@@ -122,12 +128,14 @@ public class AsmTestBase {
 	 * scale=10 and edgeFactor=16 but algorithms generating very large DataSets
 	 * require smaller input graphs.
 	 *
-	 * The examples program can write this graph as a CSV file for verifying
+	 * <p>The examples program can write this graph as a CSV file for verifying
 	 * algorithm results with external libraries:
 	 *
+	 * <pre>
 	 * ./bin/flink run examples/flink-gelly-examples_*.jar --algorithm EdgeList \
 	 *     --input RMatGraph --type long --simplify undirected --scale $SCALE --edge_factor $EDGE_FACTOR \
 	 *     --output csv --filename undirectedRMatGraph.csv
+	 * </pre>
 	 *
 	 * @param scale vertices are generated in the range [0, 2<sup>scale</sup>)
 	 * @param edgeFactor the edge count is {@code edgeFactor} * 2<sup>scale</sup>

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/dataset/ChecksumHashCodeTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/dataset/ChecksumHashCodeTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/dataset/ChecksumHashCodeTest.java
index d25f9b6..7d82b80 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/dataset/ChecksumHashCodeTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/dataset/ChecksumHashCodeTest.java
@@ -18,10 +18,11 @@
 
 package org.apache.flink.graph.asm.dataset;
 
-import org.apache.commons.lang.ArrayUtils;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.graph.asm.dataset.ChecksumHashCode.Checksum;
+
+import org.apache.commons.lang3.ArrayUtils;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -30,6 +31,9 @@ import java.util.List;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link ChecksumHashCode}.
+ */
 public class ChecksumHashCodeTest {
 
 	private ExecutionEnvironment env;

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/dataset/CollectTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/dataset/CollectTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/dataset/CollectTest.java
index ec1af42..29b454b 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/dataset/CollectTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/dataset/CollectTest.java
@@ -18,9 +18,10 @@
 
 package org.apache.flink.graph.asm.dataset;
 
-import org.apache.commons.lang.ArrayUtils;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.ExecutionEnvironment;
+
+import org.apache.commons.lang3.ArrayUtils;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -29,6 +30,9 @@ import java.util.List;
 
 import static org.junit.Assert.assertArrayEquals;
 
+/**
+ * Tests for {@link Collect}.
+ */
 public class CollectTest {
 
 	private ExecutionEnvironment env;

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/dataset/CountTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/dataset/CountTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/dataset/CountTest.java
index 476c2e6..a1160ce 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/dataset/CountTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/dataset/CountTest.java
@@ -18,9 +18,10 @@
 
 package org.apache.flink.graph.asm.dataset;
 
-import org.apache.commons.lang.ArrayUtils;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.ExecutionEnvironment;
+
+import org.apache.commons.lang3.ArrayUtils;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -29,6 +30,9 @@ import java.util.List;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link Count}.
+ */
 public class CountTest {
 
 	private ExecutionEnvironment env;

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeDegreesPairTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeDegreesPairTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeDegreesPairTest.java
index 18b52aa..22b47fe 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeDegreesPairTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeDegreesPairTest.java
@@ -29,10 +29,14 @@ import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link EdgeDegreesPair}.
+ */
 public class EdgeDegreesPairTest
 extends AsmTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeSourceDegreesTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeSourceDegreesTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeSourceDegreesTest.java
index 097b9c8..f0d51d2 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeSourceDegreesTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeSourceDegreesTest.java
@@ -29,10 +29,14 @@ import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link EdgeSourceDegrees}.
+ */
 public class EdgeSourceDegreesTest
 extends AsmTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeTargetDegreesTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeTargetDegreesTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeTargetDegreesTest.java
index b082088..6d58bb0 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeTargetDegreesTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeTargetDegreesTest.java
@@ -29,10 +29,14 @@ import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link EdgeTargetDegrees}.
+ */
 public class EdgeTargetDegreesTest
 extends AsmTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexDegreesTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexDegreesTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexDegreesTest.java
index d0aad8f..5214282 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexDegreesTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexDegreesTest.java
@@ -28,10 +28,14 @@ import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link VertexDegrees}.
+ */
 public class VertexDegreesTest
 extends AsmTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexInDegreeTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexInDegreeTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexInDegreeTest.java
index 3cbcc74..f671cab 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexInDegreeTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexInDegreeTest.java
@@ -27,10 +27,14 @@ import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link VertexInDegree}.
+ */
 public class VertexInDegreeTest
 extends AsmTestBase {
 


[02/15] flink git commit: [FLINK-6728] Activate strict checkstyle for flink-quickstart

Posted by ch...@apache.org.
[FLINK-6728] Activate strict checkstyle for flink-quickstart

This closes #3996.


Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/81798e66
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/81798e66
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/81798e66

Branch: refs/heads/master
Commit: 81798e66b1e8ca96a2e4da00d662d9c153f42062
Parents: 6445da0
Author: zentol <ch...@apache.org>
Authored: Fri May 26 09:47:11 2017 +0200
Committer: zentol <ch...@apache.org>
Committed: Fri May 26 19:17:52 2017 +0200

----------------------------------------------------------------------
 .../java/org/apache/flink/quickstart/Dummy.java |  1 -
 .../src/main/java/BatchJob.java                 |  8 ++---
 .../main/java/SocketTextStreamWordCount.java    | 22 +++++-------
 .../src/main/java/StreamingJob.java             |  8 ++---
 .../src/main/java/WordCount.java                |  7 ++--
 .../projects/testArtifact/archetype.properties  |  4 +--
 .../java/org/apache/flink/quickstart/Dummy.java |  1 -
 .../projects/testArtifact/archetype.properties  |  4 +--
 flink-quickstart/pom.xml                        | 35 ++++++++++++++++++++
 9 files changed, 59 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/81798e66/flink-quickstart/flink-quickstart-java/src/main/java/org/apache/flink/quickstart/Dummy.java
----------------------------------------------------------------------
diff --git a/flink-quickstart/flink-quickstart-java/src/main/java/org/apache/flink/quickstart/Dummy.java b/flink-quickstart/flink-quickstart-java/src/main/java/org/apache/flink/quickstart/Dummy.java
index b9a76d2..b590d76 100644
--- a/flink-quickstart/flink-quickstart-java/src/main/java/org/apache/flink/quickstart/Dummy.java
+++ b/flink-quickstart/flink-quickstart-java/src/main/java/org/apache/flink/quickstart/Dummy.java
@@ -16,7 +16,6 @@
  * limitations under the License.
  */
 
-
 package org.apache.flink.quickstart;
 
 /**

http://git-wip-us.apache.org/repos/asf/flink/blob/81798e66/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/src/main/java/BatchJob.java
----------------------------------------------------------------------
diff --git a/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/src/main/java/BatchJob.java b/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/src/main/java/BatchJob.java
index a64742f..d0e68a4 100644
--- a/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/src/main/java/BatchJob.java
+++ b/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/src/main/java/BatchJob.java
@@ -23,10 +23,10 @@ import org.apache.flink.api.java.ExecutionEnvironment;
 /**
  * Skeleton for a Flink Batch Job.
  *
- * For a full example of a Flink Batch Job, see the WordCountJob.java file in the
+ * <p>For a full example of a Flink Batch Job, see the WordCountJob.java file in the
  * same package/directory or have a look at the website.
  *
- * You can also generate a .jar file that you can submit on your Flink
+ * <p>You can also generate a .jar file that you can submit on your Flink
  * cluster.
  * Just type
  * 		mvn clean package
@@ -36,9 +36,9 @@ import org.apache.flink.api.java.ExecutionEnvironment;
  * From the CLI you can then run
  * 		./bin/flink run -c ${package}.BatchJob target/${artifactId}-${version}.jar
  *
- * For more information on the CLI see:
+ * <p>For more information on the CLI see:
  *
- * http://flink.apache.org/docs/latest/apis/cli.html
+ * <p>http://flink.apache.org/docs/latest/apis/cli.html
  */
 public class BatchJob {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/81798e66/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/src/main/java/SocketTextStreamWordCount.java
----------------------------------------------------------------------
diff --git a/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/src/main/java/SocketTextStreamWordCount.java b/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/src/main/java/SocketTextStreamWordCount.java
index abd62bb..97df489 100644
--- a/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/src/main/java/SocketTextStreamWordCount.java
+++ b/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/src/main/java/SocketTextStreamWordCount.java
@@ -27,26 +27,22 @@ import org.apache.flink.util.Collector;
  * This example shows an implementation of WordCount with data from a text
  * socket. To run the example make sure that the service providing the text data
  * is already up and running.
- * 
- * <p>
- * To start an example socket text stream on your local machine run netcat from
+ *
+ * <p>To start an example socket text stream on your local machine run netcat from
  * a command line: <code>nc -lk 9999</code>, where the parameter specifies the
  * port number.
- * 
- * 
- * <p>
- * Usage:
+ *
+ * <p>Usage:
  * <code>SocketTextStreamWordCount &lt;hostname&gt; &lt;port&gt;</code>
  * <br>
- * 
- * <p>
- * This example shows how to:
+ *
+ * <p>This example shows how to:
  * <ul>
  * <li>use StreamExecutionEnvironment.socketTextStream
  * <li>write a simple Flink program
  * <li>write and use user-defined functions
  * </ul>
- * 
+ *
  * @see <a href="www.openbsd.org/cgi-bin/man.cgi?query=nc">netcat</a>
  */
 public class SocketTextStreamWordCount {
@@ -92,7 +88,7 @@ public class SocketTextStreamWordCount {
 	/**
 	 * Implements the string tokenizer that splits sentences into words as a user-defined
 	 * FlatMapFunction. The function takes a line (String) and splits it into
-	 * multiple pairs in the form of "(word,1)" (Tuple2<String, Integer>).
+	 * multiple pairs in the form of "(word,1)" (Tuple2&lt;String, Integer&gt;).
 	 */
 	public static final class LineSplitter implements FlatMapFunction<String, Tuple2<String, Integer>> {
 
@@ -108,5 +104,5 @@ public class SocketTextStreamWordCount {
 				}
 			}
 		}
-	}	
+	}
 }

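The LineSplitter referenced in this javadoc is only partially visible in the hunk; for context, a hedged reconstruction of such a tokenizer (a sketch of the documented behavior, not necessarily byte-for-byte the archetype's code):

	import org.apache.flink.api.common.functions.FlatMapFunction;
	import org.apache.flink.api.java.tuple.Tuple2;
	import org.apache.flink.util.Collector;

	// Declared as a static nested class of the enclosing job, as in the diff above.
	public static final class LineSplitter implements FlatMapFunction<String, Tuple2<String, Integer>> {

		@Override
		public void flatMap(String value, Collector<Tuple2<String, Integer>> out) {
			// normalize and split the line into words
			for (String token : value.toLowerCase().split("\\W+")) {
				if (token.length() > 0) {
					out.collect(new Tuple2<String, Integer>(token, 1)); // emit (word, 1)
				}
			}
		}
	}
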
http://git-wip-us.apache.org/repos/asf/flink/blob/81798e66/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/src/main/java/StreamingJob.java
----------------------------------------------------------------------
diff --git a/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/src/main/java/StreamingJob.java b/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/src/main/java/StreamingJob.java
index 2b15755..45a67ae 100644
--- a/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/src/main/java/StreamingJob.java
+++ b/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/src/main/java/StreamingJob.java
@@ -24,10 +24,10 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 /**
  * Skeleton for a Flink Streaming Job.
  *
- * For a full example of a Flink Streaming Job, see the SocketTextStreamWordCount.java
+ * <p>For a full example of a Flink Streaming Job, see the SocketTextStreamWordCount.java
  * file in the same package/directory or have a look at the website.
  *
- * You can also generate a .jar file that you can submit on your Flink
+ * <p>You can also generate a .jar file that you can submit on your Flink
  * cluster.
  * Just type
  * 		mvn clean package
@@ -37,9 +37,9 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
  * From the CLI you can then run
  * 		./bin/flink run -c ${package}.StreamingJob target/${artifactId}-${version}.jar
  *
- * For more information on the CLI see:
+ * <p>For more information on the CLI see:
  *
- * http://flink.apache.org/docs/latest/apis/cli.html
+ * <p>http://flink.apache.org/docs/latest/apis/cli.html
  */
 public class StreamingJob {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/81798e66/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/src/main/java/WordCount.java
----------------------------------------------------------------------
diff --git a/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/src/main/java/WordCount.java b/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/src/main/java/WordCount.java
index 6202238..6c95389 100644
--- a/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/src/main/java/WordCount.java
+++ b/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/src/main/java/WordCount.java
@@ -18,9 +18,9 @@ package ${package};
  * limitations under the License.
  */
 
+import org.apache.flink.api.common.functions.FlatMapFunction;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.ExecutionEnvironment;
-import org.apache.flink.api.common.functions.FlatMapFunction;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.util.Collector;
 
@@ -28,8 +28,7 @@ import org.apache.flink.util.Collector;
  * Implements the "WordCount" program that computes a simple word occurrence histogram
  * over some sample data
  *
- * <p>
- * This example shows how to:
+ * <p>This example shows how to:
  * <ul>
  * <li>write a simple Flink program.
  * <li>use Tuple data types.
@@ -75,7 +74,7 @@ public class WordCount {
 	/**
 	 * Implements the string tokenizer that splits sentences into words as a user-defined
 	 * FlatMapFunction. The function takes a line (String) and splits it into
-	 * multiple pairs in the form of "(word,1)" (Tuple2<String, Integer>).
+	 * multiple pairs in the form of "(word,1)" (Tuple2&lt;String, Integer&gt;).
 	 */
 	public static final class LineSplitter implements FlatMapFunction<String, Tuple2<String, Integer>> {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/81798e66/flink-quickstart/flink-quickstart-java/src/test/resources/projects/testArtifact/archetype.properties
----------------------------------------------------------------------
diff --git a/flink-quickstart/flink-quickstart-java/src/test/resources/projects/testArtifact/archetype.properties b/flink-quickstart/flink-quickstart-java/src/test/resources/projects/testArtifact/archetype.properties
index 3f1bbd3..bfce480 100644
--- a/flink-quickstart/flink-quickstart-java/src/test/resources/projects/testArtifact/archetype.properties
+++ b/flink-quickstart/flink-quickstart-java/src/test/resources/projects/testArtifact/archetype.properties
@@ -5,9 +5,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 # http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/flink/blob/81798e66/flink-quickstart/flink-quickstart-scala/src/main/java/org/apache/flink/quickstart/Dummy.java
----------------------------------------------------------------------
diff --git a/flink-quickstart/flink-quickstart-scala/src/main/java/org/apache/flink/quickstart/Dummy.java b/flink-quickstart/flink-quickstart-scala/src/main/java/org/apache/flink/quickstart/Dummy.java
index b9a76d2..b590d76 100644
--- a/flink-quickstart/flink-quickstart-scala/src/main/java/org/apache/flink/quickstart/Dummy.java
+++ b/flink-quickstart/flink-quickstart-scala/src/main/java/org/apache/flink/quickstart/Dummy.java
@@ -16,7 +16,6 @@
  * limitations under the License.
  */
 
-
 package org.apache.flink.quickstart;
 
 /**

http://git-wip-us.apache.org/repos/asf/flink/blob/81798e66/flink-quickstart/flink-quickstart-scala/src/test/resources/projects/testArtifact/archetype.properties
----------------------------------------------------------------------
diff --git a/flink-quickstart/flink-quickstart-scala/src/test/resources/projects/testArtifact/archetype.properties b/flink-quickstart/flink-quickstart-scala/src/test/resources/projects/testArtifact/archetype.properties
index 3f1bbd3..bfce480 100644
--- a/flink-quickstart/flink-quickstart-scala/src/test/resources/projects/testArtifact/archetype.properties
+++ b/flink-quickstart/flink-quickstart-scala/src/test/resources/projects/testArtifact/archetype.properties
@@ -5,9 +5,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 # http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY

http://git-wip-us.apache.org/repos/asf/flink/blob/81798e66/flink-quickstart/pom.xml
----------------------------------------------------------------------
diff --git a/flink-quickstart/pom.xml b/flink-quickstart/pom.xml
index ea669cd..29befc8 100644
--- a/flink-quickstart/pom.xml
+++ b/flink-quickstart/pom.xml
@@ -92,6 +92,41 @@ under the License.
 					</delimiters>
 				</configuration>
 			</plugin>
+
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-checkstyle-plugin</artifactId>
+				<version>2.17</version>
+				<dependencies>
+					<dependency>
+						<groupId>com.puppycrawl.tools</groupId>
+						<artifactId>checkstyle</artifactId>
+						<version>6.19</version>
+					</dependency>
+				</dependencies>
+				<configuration>
+					<configLocation>/tools/maven/strict-checkstyle.xml</configLocation>
+					<suppressionsLocation>/tools/maven/suppressions.xml</suppressionsLocation>
+					<includeTestSourceDirectory>true</includeTestSourceDirectory>
+					<logViolationsToConsole>true</logViolationsToConsole>
+					<failOnViolation>true</failOnViolation>
+				</configuration>
+				<executions>
+					<!--
+					Execute checkstyle after compilation but before tests.
+
+					This ensures that any parsing or type checking errors are from
+					javac, so they look as expected. Beyond that, we want to
+					fail as early as possible.
+					-->
+					<execution>
+						<phase>test-compile</phase>
+						<goals>
+							<goal>check</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
 		</plugins>
 		<resources>
 			<resource>

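With the execution above bound to the test-compile phase, the check needs no separate invocation; a hedged usage sketch (standard Maven goals, nothing Flink-specific):

	mvn test-compile     # compiles, then runs checkstyle before any tests execute
	mvn checkstyle:check # runs the check goal directly against this configuration
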

[12/15] flink git commit: [FLINK-6709] [gelly] Activate strict checkstyle for flink-gellies

Posted by ch...@apache.org.
[FLINK-6709] [gelly] Activate strict checkstyle for flink-gellies

This closes #3997.


Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/d313ac76
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/d313ac76
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/d313ac76

Branch: refs/heads/master
Commit: d313ac765e81f1b41f0eded45dfe23e1622be47a
Parents: 81798e6
Author: Greg Hogan <co...@greghogan.com>
Authored: Wed May 24 15:10:33 2017 -0400
Committer: zentol <ch...@apache.org>
Committed: Fri May 26 19:17:55 2017 +0200

----------------------------------------------------------------------
 flink-libraries/flink-gelly-examples/pom.xml    |  35 ++
 .../java/org/apache/flink/graph/Runner.java     |  11 +-
 .../apache/flink/graph/drivers/AdamicAdar.java  |   5 +-
 .../graph/drivers/ClusteringCoefficient.java    |   5 +-
 .../graph/drivers/ConnectedComponents.java      |   2 +-
 .../org/apache/flink/graph/drivers/Driver.java  |   4 +-
 .../flink/graph/drivers/GraphMetrics.java       |   3 +-
 .../org/apache/flink/graph/drivers/HITS.java    |  11 +-
 .../flink/graph/drivers/JaccardIndex.java       |   5 +-
 .../apache/flink/graph/drivers/PageRank.java    |   9 +-
 .../flink/graph/drivers/SimpleDriver.java       |   6 +-
 .../flink/graph/drivers/TriangleListing.java    |   5 +-
 .../apache/flink/graph/drivers/input/CSV.java   |   3 +-
 .../graph/drivers/input/CirculantGraph.java     |   2 +-
 .../graph/drivers/input/GeneratedGraph.java     |  17 +-
 .../flink/graph/drivers/input/GridGraph.java    |   4 +-
 .../flink/graph/drivers/input/RMatGraph.java    |   3 +-
 .../drivers/parameter/ChoiceParameter.java      |   5 +-
 .../drivers/parameter/IterationConvergence.java |   2 +-
 .../graph/drivers/parameter/Parameter.java      |   4 +-
 .../drivers/parameter/ParameterizedBase.java    |   3 +-
 .../flink/graph/drivers/parameter/Simplify.java |   5 +-
 .../graph/examples/EuclideanGraphWeighing.java  |  13 +-
 .../flink/graph/examples/GSAPageRank.java       |  10 +-
 .../examples/GSASingleSourceShortestPaths.java  |  22 +-
 .../flink/graph/examples/IncrementalSSSP.java   |  49 +-
 .../flink/graph/examples/MusicProfiles.java     |  28 +-
 .../apache/flink/graph/examples/PageRank.java   |   6 +-
 .../apache/flink/graph/examples/PregelSSSP.java |  26 +-
 .../examples/SingleSourceShortestPaths.java     |  18 +-
 .../examples/data/CommunityDetectionData.java   |   2 +-
 .../data/ConnectedComponentsDefaultData.java    |   2 +-
 .../graph/examples/data/EuclideanGraphData.java |   2 +-
 .../examples/data/IncrementalSSSPData.java      |   2 +-
 .../examples/data/LabelPropagationData.java     |  42 +-
 .../graph/examples/data/MusicProfilesData.java  |  18 +-
 .../flink/graph/examples/data/PageRankData.java |  14 +-
 .../data/SingleSourceShortestPathsData.java     |   8 +-
 .../graph/examples/data/SummarizationData.java  |  24 +-
 .../graph/examples/data/TriangleCountData.java  |  10 +-
 .../src/main/resources/logback.xml              |   2 +-
 .../scala/examples/ConnectedComponents.scala    |   9 +-
 .../examples/GSASingleSourceShortestPaths.scala |   2 +-
 .../examples/SingleSourceShortestPaths.scala    |  11 +-
 .../org/apache/flink/graph/RunnerITCase.java    |   4 +
 .../flink/graph/drivers/AdamicAdarITCase.java   |   6 +-
 .../drivers/ClusteringCoefficientITCase.java    |  44 +-
 .../drivers/ConnectedComponentsITCase.java      |   4 +
 .../flink/graph/drivers/DriverBaseITCase.java   |  10 +-
 .../flink/graph/drivers/EdgeListITCase.java     |   6 +-
 .../flink/graph/drivers/GraphMetricsITCase.java |   4 +
 .../apache/flink/graph/drivers/HITSITCase.java  |   4 +
 .../flink/graph/drivers/JaccardIndexITCase.java |   6 +-
 .../flink/graph/drivers/PageRankITCase.java     |   4 +
 .../graph/drivers/TriangleListingITCase.java    |   8 +-
 .../graph/drivers/input/GeneratedGraphTest.java |  34 +-
 .../drivers/parameter/BooleanParameterTest.java |   4 +
 .../drivers/parameter/ChoiceParameterTest.java  |   4 +
 .../drivers/parameter/DoubleParameterTest.java  |   4 +
 .../parameter/IterationConvergenceTest.java     |   4 +
 .../drivers/parameter/LongParameterTest.java    |   4 +
 .../drivers/parameter/ParameterTestBase.java    |   7 +-
 .../graph/drivers/parameter/SimplifyTest.java   |   4 +
 .../drivers/parameter/StringParameterTest.java  |   4 +
 .../graph/library/CommunityDetectionITCase.java |   4 +
 .../graph/library/LabelPropagationITCase.java   |   4 +
 .../graph/library/SummarizationITCase.java      |   4 +
 .../graph/library/TriangleEnumeratorITCase.java |   8 +-
 .../flink/graph/test/GatherSumApplyITCase.java  |   4 +
 .../examples/EuclideanGraphWeighingITCase.java  |   8 +-
 .../test/examples/IncrementalSSSPITCase.java    |  10 +-
 .../test/examples/MusicProfilesITCase.java      |   9 +-
 .../graph/test/examples/PageRankITCase.java     |   4 +
 .../SingleSourceShortestPathsITCase.java        |  11 +-
 .../test/GellyScalaAPICompletenessTest.scala    |   6 +-
 .../test/operations/GraphMutationsITCase.scala  |   2 +-
 .../test/operations/GraphOperationsITCase.scala |   3 +-
 .../test/operations/JoinWithEdgesITCase.scala   |   2 +-
 flink-libraries/flink-gelly/pom.xml             |  40 ++
 .../flink/graph/AbstractGraphAnalytic.java      |  59 --
 .../org/apache/flink/graph/AnalyticHelper.java  |   2 +-
 .../apache/flink/graph/EdgeJoinFunction.java    |   6 +-
 .../java/org/apache/flink/graph/EdgeOrder.java  |   2 +-
 .../org/apache/flink/graph/EdgesFunction.java   |  14 +-
 .../graph/EdgesFunctionWithVertexValue.java     |  14 +-
 .../main/java/org/apache/flink/graph/Graph.java | 224 +++----
 .../org/apache/flink/graph/GraphAnalytic.java   |   2 +-
 .../apache/flink/graph/GraphAnalyticBase.java   |  59 ++
 .../org/apache/flink/graph/GraphCsvReader.java  |  29 +-
 .../flink/graph/IterationConfiguration.java     |  42 +-
 .../apache/flink/graph/NeighborsFunction.java   |  14 +-
 .../graph/NeighborsFunctionWithVertexValue.java |  14 +-
 .../apache/flink/graph/ReduceEdgesFunction.java |   2 +-
 .../flink/graph/ReduceNeighborsFunction.java    |   2 +-
 .../java/org/apache/flink/graph/Triplet.java    |   2 +-
 .../apache/flink/graph/VertexJoinFunction.java  |   2 +-
 .../asm/dataset/AbstractDataSetAnalytic.java    |  58 --
 .../graph/asm/dataset/ChecksumHashCode.java     |  23 +-
 .../apache/flink/graph/asm/dataset/Collect.java |   2 +-
 .../apache/flink/graph/asm/dataset/Count.java   |   2 +-
 .../graph/asm/dataset/DataSetAnalytic.java      |   2 +-
 .../graph/asm/dataset/DataSetAnalyticBase.java  |  58 ++
 .../annotate/DegreeAnnotationFunctions.java     |   5 +
 .../annotate/directed/EdgeDegreesPair.java      |   2 +-
 .../annotate/directed/EdgeSourceDegrees.java    |   2 +-
 .../annotate/directed/EdgeTargetDegrees.java    |   2 +-
 .../degree/annotate/directed/VertexDegrees.java |   6 +-
 .../annotate/directed/VertexInDegree.java       |   2 +-
 .../annotate/directed/VertexOutDegree.java      |   2 +-
 .../annotate/undirected/EdgeDegreePair.java     |   2 +-
 .../annotate/undirected/EdgeSourceDegree.java   |   2 +-
 .../annotate/undirected/EdgeTargetDegree.java   |   2 +-
 .../annotate/undirected/VertexDegree.java       |   6 +-
 .../degree/filter/undirected/MaximumDegree.java |   2 +-
 .../graph/asm/simple/directed/Simplify.java     |   2 +-
 .../graph/asm/simple/undirected/Simplify.java   |   4 +-
 .../flink/graph/asm/translate/Translate.java    |   8 +-
 .../asm/translate/TranslateEdgeValues.java      |   2 +-
 .../graph/asm/translate/TranslateFunction.java  |   4 +-
 .../graph/asm/translate/TranslateGraphIds.java  |   6 +-
 .../asm/translate/TranslateVertexValues.java    |   2 +-
 .../translators/LongValueAddOffset.java         |   2 +-
 .../translators/LongValueToSignedIntValue.java  |   2 +-
 .../LongValueToUnsignedIntValue.java            |   2 +-
 .../flink/graph/bipartite/BipartiteGraph.java   |  16 +-
 .../flink/graph/bipartite/Projection.java       |   2 +-
 .../graph/generator/AbstractGraphGenerator.java |  35 --
 .../flink/graph/generator/CirculantGraph.java   |   8 +-
 .../flink/graph/generator/CompleteGraph.java    |   2 +-
 .../flink/graph/generator/CycleGraph.java       |   2 +-
 .../apache/flink/graph/generator/EchoGraph.java |   6 +-
 .../flink/graph/generator/EmptyGraph.java       |   4 +-
 .../flink/graph/generator/GraphGenerator.java   |   8 +-
 .../graph/generator/GraphGeneratorBase.java     |  42 ++
 .../graph/generator/GraphGeneratorUtils.java    |   7 +-
 .../apache/flink/graph/generator/GridGraph.java |   2 +-
 .../flink/graph/generator/HypercubeGraph.java   |   2 +-
 .../apache/flink/graph/generator/PathGraph.java |   6 +-
 .../apache/flink/graph/generator/RMatGraph.java |  61 +-
 .../graph/generator/SingletonEdgeGraph.java     |   2 +-
 .../apache/flink/graph/generator/StarGraph.java |  14 +-
 .../random/AbstractGeneratorFactory.java        |  72 ---
 .../generator/random/GeneratorFactoryBase.java  |  72 +++
 .../random/JDKRandomGeneratorFactory.java       |   2 +-
 .../random/MersenneTwisterFactory.java          |   2 +-
 .../graph/generator/random/RandomGenerable.java |   2 +-
 .../apache/flink/graph/gsa/ApplyFunction.java   |   6 +-
 .../flink/graph/gsa/GSAConfiguration.java       |  12 +-
 .../apache/flink/graph/gsa/GatherFunction.java  |   8 +-
 .../graph/gsa/GatherSumApplyIteration.java      |   6 +-
 .../org/apache/flink/graph/gsa/Neighbor.java    |   5 +-
 .../org/apache/flink/graph/gsa/SumFunction.java |   4 +-
 .../flink/graph/library/CommunityDetection.java |  28 +-
 .../graph/library/ConnectedComponents.java      |  12 +-
 .../graph/library/GSAConnectedComponents.java   |  10 +-
 .../library/GSASingleSourceShortestPaths.java   |   6 +-
 .../flink/graph/library/LabelPropagation.java   |  10 +-
 .../library/SingleSourceShortestPaths.java      |   6 +-
 .../flink/graph/library/Summarization.java      |  89 +--
 .../flink/graph/library/TriangleEnumerator.java |  29 +-
 .../directed/AverageClusteringCoefficient.java  |  25 +-
 .../directed/GlobalClusteringCoefficient.java   |  29 +-
 .../directed/LocalClusteringCoefficient.java    |  19 +-
 .../clustering/directed/TriadicCensus.java      |  65 ++-
 .../clustering/directed/TriangleListing.java    |  38 +-
 .../AverageClusteringCoefficient.java           |  25 +-
 .../undirected/GlobalClusteringCoefficient.java |  29 +-
 .../undirected/LocalClusteringCoefficient.java  |  18 +-
 .../clustering/undirected/TriadicCensus.java    |  35 +-
 .../clustering/undirected/TriangleListing.java  |  37 +-
 .../graph/library/link_analysis/Functions.java  |  45 --
 .../flink/graph/library/link_analysis/HITS.java | 582 -------------------
 .../graph/library/link_analysis/PageRank.java   | 544 -----------------
 .../graph/library/linkanalysis/Functions.java   |  45 ++
 .../flink/graph/library/linkanalysis/HITS.java  | 582 +++++++++++++++++++
 .../graph/library/linkanalysis/PageRank.java    | 544 +++++++++++++++++
 .../graph/library/metric/ChecksumHashCode.java  |   4 +-
 .../library/metric/directed/EdgeMetrics.java    |  37 +-
 .../library/metric/directed/VertexMetrics.java  |  35 +-
 .../library/metric/undirected/EdgeMetrics.java  |  31 +-
 .../metric/undirected/VertexMetrics.java        |  35 +-
 .../graph/library/similarity/AdamicAdar.java    |  32 +-
 .../graph/library/similarity/JaccardIndex.java  |  44 +-
 .../flink/graph/pregel/ComputeFunction.java     |  73 +--
 .../flink/graph/pregel/MessageCombiner.java     |   6 +-
 .../flink/graph/pregel/MessageIterator.java     |  22 +-
 .../pregel/VertexCentricConfiguration.java      |   4 +-
 .../graph/pregel/VertexCentricIteration.java    |  82 ++-
 .../flink/graph/spargel/GatherFunction.java     |  28 +-
 .../flink/graph/spargel/MessageIterator.java    |  19 +-
 .../flink/graph/spargel/ScatterFunction.java    |  58 +-
 .../spargel/ScatterGatherConfiguration.java     |  10 +-
 .../graph/spargel/ScatterGatherIteration.java   |  72 ++-
 .../graph/types/valuearray/IntValueArray.java   |  18 +-
 .../valuearray/IntValueArrayComparator.java     |   2 +-
 .../graph/types/valuearray/LongValueArray.java  |  18 +-
 .../valuearray/LongValueArrayComparator.java    |   2 +-
 .../graph/types/valuearray/NullValueArray.java  |   2 +-
 .../valuearray/NullValueArrayComparator.java    |   2 +-
 .../types/valuearray/StringValueArray.java      |  20 +-
 .../valuearray/StringValueArrayComparator.java  |   2 +-
 .../graph/types/valuearray/ValueArray.java      |   6 +-
 .../types/valuearray/ValueArrayFactory.java     |   2 +-
 .../flink/graph/utils/EdgeToTuple2Map.java      |   2 +-
 .../flink/graph/utils/EdgeToTuple3Map.java      |   2 +-
 .../apache/flink/graph/utils/GraphUtils.java    |   9 +-
 .../apache/flink/graph/utils/Murmur3_32.java    | 129 ----
 .../apache/flink/graph/utils/MurmurHash.java    | 129 ++++
 .../flink/graph/utils/Tuple2ToEdgeMap.java      |   2 +-
 .../flink/graph/utils/Tuple2ToVertexMap.java    |   2 +-
 .../flink/graph/utils/Tuple3ToEdgeMap.java      |   2 +-
 .../flink/graph/utils/VertexToTuple2Map.java    |   2 +-
 .../proxy/GraphAlgorithmWrappingDataSet.java    |   7 +-
 .../proxy/GraphAlgorithmWrappingGraph.java      |   7 +-
 .../graph/utils/proxy/OptionalBoolean.java      |  15 +-
 .../flink/graph/validation/GraphValidator.java  |   8 +-
 .../validation/InvalidVertexIdsValidator.java   |  11 +-
 .../org/apache/flink/graph/asm/AsmTestBase.java |  14 +-
 .../graph/asm/dataset/ChecksumHashCodeTest.java |   6 +-
 .../flink/graph/asm/dataset/CollectTest.java    |   6 +-
 .../flink/graph/asm/dataset/CountTest.java      |   6 +-
 .../annotate/directed/EdgeDegreesPairTest.java  |   4 +
 .../directed/EdgeSourceDegreesTest.java         |   4 +
 .../directed/EdgeTargetDegreesTest.java         |   4 +
 .../annotate/directed/VertexDegreesTest.java    |   4 +
 .../annotate/directed/VertexInDegreeTest.java   |   4 +
 .../annotate/directed/VertexOutDegreeTest.java  |   4 +
 .../annotate/undirected/EdgeDegreePairTest.java |   4 +
 .../undirected/EdgeSourceDegreeTest.java        |   4 +
 .../undirected/EdgeTargetDegreeTest.java        |   4 +
 .../annotate/undirected/VertexDegreeTest.java   |   4 +
 .../filter/undirected/MaximumDegreeTest.java    |   4 +
 .../graph/asm/simple/directed/SimplifyTest.java |   4 +
 .../asm/simple/undirected/SimplifyTest.java     |   4 +
 .../graph/asm/translate/TranslateTest.java      |  10 +-
 .../translators/LongValueAddOffsetTest.java     |   4 +
 .../LongValueToSignedIntValueTest.java          |  16 +-
 .../translators/LongValueToStringValueTest.java |   4 +
 .../LongValueToUnsignedIntValueTest.java        |   8 +-
 .../translate/translators/ToNullValueTest.java  |   4 +
 .../graph/bipartite/BipartiteEdgeTest.java      |   4 +-
 .../graph/bipartite/BipartiteGraphTest.java     |   4 +
 .../flink/graph/bipartite/ProjectionTest.java   |   4 +
 .../graph/generator/AbstractGraphTest.java      |  32 -
 .../graph/generator/CirculantGraphTest.java     |   8 +-
 .../graph/generator/CompleteGraphTest.java      |   8 +-
 .../flink/graph/generator/CycleGraphTest.java   |   6 +-
 .../flink/graph/generator/EchoGraphTest.java    |   6 +-
 .../flink/graph/generator/EmptyGraphTest.java   |   6 +-
 .../graph/generator/GraphGeneratorTestBase.java |  36 ++
 .../flink/graph/generator/GridGraphTest.java    |  10 +-
 .../graph/generator/HypercubeGraphTest.java     |   6 +-
 .../flink/graph/generator/PathGraphTest.java    |   6 +-
 .../flink/graph/generator/RMatGraphTest.java    |   8 +-
 .../graph/generator/SingletonEdgeGraphTest.java |   6 +-
 .../flink/graph/generator/StarGraphTest.java    |   6 +-
 .../apache/flink/graph/generator/TestUtils.java |   5 +
 .../apache/flink/graph/gsa/GSACompilerTest.java | 120 ++--
 .../flink/graph/gsa/GSATranslationTest.java     | 168 +++---
 ...ctedComponentsWithRandomisedEdgesITCase.java |   7 +-
 .../AverageClusteringCoefficientTest.java       |   6 +-
 .../GlobalClusteringCoefficientTest.java        |   8 +-
 .../LocalClusteringCoefficientTest.java         |   8 +-
 .../clustering/directed/TriadicCensusTest.java  |   8 +-
 .../directed/TriangleListingTest.java           |   8 +-
 .../AverageClusteringCoefficientTest.java       |   6 +-
 .../GlobalClusteringCoefficientTest.java        |   8 +-
 .../LocalClusteringCoefficientTest.java         |   8 +-
 .../undirected/TriadicCensusTest.java           |   8 +-
 .../undirected/TriangleListingTest.java         |   8 +-
 .../graph/library/link_analysis/HITSTest.java   | 144 -----
 .../library/link_analysis/PageRankTest.java     | 135 -----
 .../graph/library/linkanalysis/HITSTest.java    | 148 +++++
 .../library/linkanalysis/PageRankTest.java      | 139 +++++
 .../library/metric/ChecksumHashCodeTest.java    |   4 +
 .../metric/directed/EdgeMetricsTest.java        |   8 +-
 .../metric/directed/VertexMetricsTest.java      |   8 +-
 .../metric/undirected/EdgeMetricsTest.java      |   8 +-
 .../metric/undirected/VertexMetricsTest.java    |   8 +-
 .../library/similarity/AdamicAdarTest.java      |  18 +-
 .../library/similarity/JaccardIndexTest.java    |   4 +
 .../flink/graph/pregel/PregelCompilerTest.java  | 383 ++++++------
 .../graph/pregel/PregelTranslationTest.java     | 149 +++--
 .../graph/spargel/SpargelCompilerTest.java      | 292 +++++-----
 .../graph/spargel/SpargelTranslationTest.java   | 223 ++++---
 .../test/CollectionModeSuperstepITCase.java     |  13 +-
 .../test/GatherSumApplyConfigurationITCase.java |  14 +-
 .../test/ScatterGatherConfigurationITCase.java  |  10 +-
 .../apache/flink/graph/test/TestGraphUtils.java |  62 +-
 .../graph/test/operations/DegreesITCase.java    |   7 +-
 .../operations/DegreesWithExceptionITCase.java  |  22 +-
 .../test/operations/FromCollectionITCase.java   |   7 +-
 .../test/operations/GraphCreationITCase.java    |  10 +-
 .../operations/GraphCreationWithCsvITCase.java  |  44 +-
 .../GraphCreationWithMapperITCase.java          |   4 +
 .../test/operations/GraphMutationsITCase.java   |  46 +-
 .../test/operations/GraphOperationsITCase.java  |   6 +-
 .../test/operations/JoinWithEdgesITCase.java    |   7 +-
 .../test/operations/JoinWithVerticesITCase.java |   4 +
 .../graph/test/operations/MapEdgesITCase.java   |   6 +-
 .../test/operations/MapVerticesITCase.java      |   4 +
 .../operations/ReduceOnEdgesMethodsITCase.java  |  30 +-
 .../ReduceOnEdgesWithExceptionITCase.java       |  18 +-
 .../ReduceOnNeighborMethodsITCase.java          |  50 +-
 .../ReduceOnNeighborsWithExceptionITCase.java   |  17 +-
 .../test/operations/TypeExtractorTest.java      |  18 +-
 .../valuearray/IntValueArrayComparatorTest.java |   3 +
 .../valuearray/IntValueArraySerializerTest.java |   2 +-
 .../types/valuearray/IntValueArrayTest.java     |  20 +-
 .../LongValueArrayComparatorTest.java           |   3 +
 .../LongValueArraySerializerTest.java           |   2 +-
 .../types/valuearray/LongValueArrayTest.java    |  20 +-
 .../NullValueArrayComparatorTest.java           |   3 +
 .../NullValueArraySerializerTest.java           |   2 +-
 .../types/valuearray/NullValueArrayTest.java    |  12 +-
 .../StringValueArrayComparatorTest.java         |   3 +
 .../StringValueArraySerializerTest.java         |   2 +-
 .../types/valuearray/StringValueArrayTest.java  |  40 +-
 .../valuearray/ValueArrayTypeInfoTest.java      |   5 +-
 .../graph/utils/proxy/OptionalBooleanTest.java  |  11 +-
 320 files changed, 4607 insertions(+), 3968 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/pom.xml
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/pom.xml b/flink-libraries/flink-gelly-examples/pom.xml
index 68ad050..3da6263 100644
--- a/flink-libraries/flink-gelly-examples/pom.xml
+++ b/flink-libraries/flink-gelly-examples/pom.xml
@@ -234,6 +234,41 @@
 					<configLocation>${project.basedir}/../../tools/maven/scalastyle-config.xml</configLocation>
 				</configuration>
 			</plugin>
+
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-checkstyle-plugin</artifactId>
+				<version>2.17</version>
+				<dependencies>
+					<dependency>
+						<groupId>com.puppycrawl.tools</groupId>
+						<artifactId>checkstyle</artifactId>
+						<version>6.19</version>
+					</dependency>
+				</dependencies>
+				<configuration>
+					<configLocation>/tools/maven/strict-checkstyle.xml</configLocation>
+					<suppressionsLocation>/tools/maven/suppressions.xml</suppressionsLocation>
+					<includeTestSourceDirectory>true</includeTestSourceDirectory>
+					<logViolationsToConsole>true</logViolationsToConsole>
+					<failOnViolation>true</failOnViolation>
+				</configuration>
+				<executions>
+					<!--
+					Execute checkstyle after compilation but before tests.
+
+					This ensures that any parsing or type checking errors are from
+					javac, so they look as expected. Beyond that, we want to
+					fail as early as possible.
+					-->
+					<execution>
+						<phase>test-compile</phase>
+						<goals>
+							<goal>check</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
 		</plugins>
 	</build>
 

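As in the other modules covered by this change, the checkstyle check goal is bound to the test-compile phase, so a violation fails the build before any test runs. Most of the Java edits below are driven by the strict ruleset's Javadoc conventions, in particular the requirement that every paragraph after the first opens with a <p> tag. A minimal sketch of that rule (the snippets are hypothetical, not part of this commit):

    /**
     * Summary sentence.
     *
     * A second paragraph without a tag is rejected by the strict ruleset.
     */

    /**
     * Summary sentence.
     *
     * <p>A second paragraph opening with a tag is accepted.
     */
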
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/Runner.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/Runner.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/Runner.java
index 07cad1f..e468a58 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/Runner.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/Runner.java
@@ -18,8 +18,6 @@
 
 package org.apache.flink.graph;
 
-import org.apache.commons.lang3.StringEscapeUtils;
-import org.apache.commons.lang3.text.StrBuilder;
 import org.apache.flink.api.common.ExecutionConfig;
 import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.api.java.io.CsvOutputFormat;
@@ -52,6 +50,9 @@ import org.apache.flink.graph.drivers.output.Print;
 import org.apache.flink.graph.drivers.parameter.Parameterized;
 import org.apache.flink.util.InstantiationUtil;
 
+import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.lang3.text.StrBuilder;
+
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
@@ -59,14 +60,14 @@ import java.util.List;
 /**
  * This default main class executes Flink drivers.
  *
- * An execution has one input, one algorithm, and one output. Anything more
+ * <p>An execution has one input, one algorithm, and one output. Anything more
  * complex can be expressed as a user program written in a JVM language.
  *
- * Inputs and algorithms are decoupled by, respectively, producing and
+ * <p>Inputs and algorithms are decoupled by, respectively, producing and
  * consuming a graph. Currently only {@code Graph} is supported but later
  * updates may add support for new graph types such as {@code BipartiteGraph}.
  *
- * Algorithms must explicitly support each type of output via implementation of
+ * <p>Algorithms must explicitly support each type of output via implementation of
  * interfaces. This is scalable as the number of outputs is small and finite.
  */
 public class Runner {

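The import reshuffling above follows the strict ruleset's import order, as evidenced throughout this commit: org.apache.flink imports form the first group, other third-party imports such as org.apache.commons come next, java.* imports follow, and static imports go last, with a blank line between groups. A sketch of the expected layout (imports chosen for illustration only):

    import org.apache.flink.api.java.ExecutionEnvironment;

    import org.apache.commons.lang3.text.StrBuilder;

    import java.util.List;

    import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
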
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/AdamicAdar.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/AdamicAdar.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/AdamicAdar.java
index 8bf9268..c5867ed 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/AdamicAdar.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/AdamicAdar.java
@@ -18,8 +18,6 @@
 
 package org.apache.flink.graph.drivers;
 
-import org.apache.commons.lang3.text.StrBuilder;
-import org.apache.commons.lang3.text.WordUtils;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.drivers.output.CSV;
@@ -29,6 +27,9 @@ import org.apache.flink.graph.drivers.parameter.LongParameter;
 import org.apache.flink.graph.library.similarity.AdamicAdar.Result;
 import org.apache.flink.types.CopyableValue;
 
+import org.apache.commons.lang3.text.StrBuilder;
+import org.apache.commons.lang3.text.WordUtils;
+
 import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
 
 /**

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/ClusteringCoefficient.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/ClusteringCoefficient.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/ClusteringCoefficient.java
index 4958b5a..bcd8ec4 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/ClusteringCoefficient.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/ClusteringCoefficient.java
@@ -18,8 +18,6 @@
 
 package org.apache.flink.graph.drivers;
 
-import org.apache.commons.lang3.text.StrBuilder;
-import org.apache.commons.lang3.text.WordUtils;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.GraphAnalytic;
@@ -31,6 +29,9 @@ import org.apache.flink.graph.drivers.parameter.ChoiceParameter;
 import org.apache.flink.graph.drivers.parameter.LongParameter;
 import org.apache.flink.types.CopyableValue;
 
+import org.apache.commons.lang3.text.StrBuilder;
+import org.apache.commons.lang3.text.WordUtils;
+
 import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
 
 /**

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/ConnectedComponents.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/ConnectedComponents.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/ConnectedComponents.java
index 95904d8..8a158a2 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/ConnectedComponents.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/ConnectedComponents.java
@@ -36,7 +36,7 @@ import java.util.List;
 /**
  * Driver for {@link org.apache.flink.graph.library.GSAConnectedComponents}.
  *
- * The gather-sum-apply implementation is used because scatter-gather does not
+ * <p>The gather-sum-apply implementation is used because scatter-gather does not
  * handle object reuse (see FLINK-5891).
  */
 public class ConnectedComponents<K extends Comparable<K>, VV, EV>

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/Driver.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/Driver.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/Driver.java
index b001875..fda9079 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/Driver.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/Driver.java
@@ -27,7 +27,7 @@ import org.apache.flink.graph.drivers.parameter.Parameterized;
  * A driver for one or more {@link GraphAlgorithm}s and/or
  * {@link GraphAnalytic}s.
  *
- * It is preferable to include multiple, overlapping algorithms/analytics in
+ * <p>It is preferable to include multiple, overlapping algorithms/analytics in
  * the same driver both for simplicity and since this examples module
  * demonstrates Flink capabilities rather than absolute performance.
  *
@@ -56,7 +56,7 @@ extends Parameterized {
 	 * "Run" algorithms and analytics on the input graph. The execution plan
 	 * is not finalized here but in the output methods.
 	 *
-	 * Drivers are first configured, next planned, and finally the chosen
+	 * <p>Drivers are first configured, next planned, and finally the chosen
 	 * output method is called.
 	 *
 	 * @param graph input graph

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/GraphMetrics.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/GraphMetrics.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/GraphMetrics.java
index aef8f9f..ea02225 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/GraphMetrics.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/GraphMetrics.java
@@ -18,7 +18,6 @@
 
 package org.apache.flink.graph.drivers;
 
-import org.apache.commons.lang3.text.StrBuilder;
 import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.GraphAnalytic;
 import org.apache.flink.graph.asm.result.PrintableResult;
@@ -27,6 +26,8 @@ import org.apache.flink.graph.drivers.output.Print;
 import org.apache.flink.graph.drivers.parameter.ChoiceParameter;
 import org.apache.flink.graph.drivers.parameter.ParameterizedBase;
 
+import org.apache.commons.lang3.text.StrBuilder;
+
 /**
  * Driver for directed and undirected graph metrics analytics.
  *

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/HITS.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/HITS.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/HITS.java
index 209cddf..6f24c09 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/HITS.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/HITS.java
@@ -18,17 +18,18 @@
 
 package org.apache.flink.graph.drivers;
 
-import org.apache.commons.lang3.text.StrBuilder;
-import org.apache.commons.lang3.text.WordUtils;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.drivers.output.CSV;
 import org.apache.flink.graph.drivers.output.Print;
 import org.apache.flink.graph.drivers.parameter.IterationConvergence;
-import org.apache.flink.graph.library.link_analysis.HITS.Result;
+import org.apache.flink.graph.library.linkanalysis.HITS.Result;
+
+import org.apache.commons.lang3.text.StrBuilder;
+import org.apache.commons.lang3.text.WordUtils;
 
 /**
- * Driver for {@link org.apache.flink.graph.library.link_analysis.HITS}.
+ * Driver for {@link org.apache.flink.graph.library.linkanalysis.HITS}.
  */
 public class HITS<K, VV, EV>
 extends SimpleDriver<K, VV, EV, Result<K>>
@@ -62,7 +63,7 @@ implements CSV, Print {
 	@Override
 	protected DataSet<Result<K>> simplePlan(Graph<K, VV, EV> graph) throws Exception {
 		return graph
-			.run(new org.apache.flink.graph.library.link_analysis.HITS<K, VV, EV>(
+			.run(new org.apache.flink.graph.library.linkanalysis.HITS<K, VV, EV>(
 				iterationConvergence.getValue().iterations,
 				iterationConvergence.getValue().convergenceThreshold));
 	}

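The package move from link_analysis to linkanalysis satisfies the ruleset's package-name pattern, which, as is usual for checkstyle, rejects underscores in package names. The classes themselves are unchanged (the diffstat above shows the 582- and 544-line files moving wholesale), so callers only need their imports updated, as this driver does:

    // before: import org.apache.flink.graph.library.link_analysis.HITS.Result;
    import org.apache.flink.graph.library.linkanalysis.HITS.Result;
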
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/JaccardIndex.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/JaccardIndex.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/JaccardIndex.java
index d5b2ae3..f6e10f0 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/JaccardIndex.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/JaccardIndex.java
@@ -18,8 +18,6 @@
 
 package org.apache.flink.graph.drivers;
 
-import org.apache.commons.lang3.text.StrBuilder;
-import org.apache.commons.lang3.text.WordUtils;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.drivers.output.CSV;
@@ -30,6 +28,9 @@ import org.apache.flink.graph.drivers.parameter.LongParameter;
 import org.apache.flink.graph.library.similarity.JaccardIndex.Result;
 import org.apache.flink.types.CopyableValue;
 
+import org.apache.commons.lang3.text.StrBuilder;
+import org.apache.commons.lang3.text.WordUtils;
+
 import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
 
 /**

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/PageRank.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/PageRank.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/PageRank.java
index 5d74bdb..b2602b9 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/PageRank.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/PageRank.java
@@ -18,17 +18,18 @@
 
 package org.apache.flink.graph.drivers;
 
-import org.apache.commons.lang3.text.StrBuilder;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.drivers.output.CSV;
 import org.apache.flink.graph.drivers.output.Print;
 import org.apache.flink.graph.drivers.parameter.DoubleParameter;
 import org.apache.flink.graph.drivers.parameter.IterationConvergence;
-import org.apache.flink.graph.library.link_analysis.PageRank.Result;
+import org.apache.flink.graph.library.linkanalysis.PageRank.Result;
+
+import org.apache.commons.lang3.text.StrBuilder;
 
 /**
- * @see org.apache.flink.graph.library.link_analysis.PageRank
+ * @see org.apache.flink.graph.library.linkanalysis.PageRank
  */
 public class PageRank<K, VV, EV>
 extends SimpleDriver<K, VV, EV, Result<K>>
@@ -67,7 +68,7 @@ implements CSV, Print {
 	@Override
 	protected DataSet<Result<K>> simplePlan(Graph<K, VV, EV> graph) throws Exception {
 		return graph
-			.run(new org.apache.flink.graph.library.link_analysis.PageRank<K, VV, EV>(
+			.run(new org.apache.flink.graph.library.linkanalysis.PageRank<K, VV, EV>(
 				dampingFactor.getValue(),
 				iterationConvergence.getValue().iterations,
 				iterationConvergence.getValue().convergenceThreshold));

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/SimpleDriver.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/SimpleDriver.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/SimpleDriver.java
index 5cecca1..a5ace26 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/SimpleDriver.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/SimpleDriver.java
@@ -61,7 +61,7 @@ implements Driver<K, VV, EV> {
 	/**
 	 * Print hash of execution results.
 	 *
-	 * Does *not* implement/override {@code Hash} since {@link Driver}
+	 * <p>Does *not* implement/override {@code Hash} since {@link Driver}
 	 * implementations designate the appropriate outputs.
 	 *
 	 * @param executionName job name
@@ -78,7 +78,7 @@ implements Driver<K, VV, EV> {
 	/**
 	 * Print execution results.
 	 *
-	 * Does *not* implement/override {@code Print} since {@link Driver}
+	 * <p>Does *not* implement/override {@code Print} since {@link Driver}
 	 * implementations designate the appropriate outputs.
 	 *
 	 * @param executionName job name
@@ -95,7 +95,7 @@ implements Driver<K, VV, EV> {
 	/**
 	 * Write execution results to file using CSV format.
 	 *
-	 * Does *not* implement/override {@code CSV} since {@link Driver}
+	 * <p>Does *not* implement/override {@code CSV} since {@link Driver}
 	 * implementations designate the appropriate outputs.
 	 *
 	 * @param filename output filename

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/TriangleListing.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/TriangleListing.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/TriangleListing.java
index 5157b8e..1c9bdc5 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/TriangleListing.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/TriangleListing.java
@@ -18,8 +18,6 @@
 
 package org.apache.flink.graph.drivers;
 
-import org.apache.commons.lang3.text.StrBuilder;
-import org.apache.commons.lang3.text.WordUtils;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.GraphAnalytic;
@@ -32,6 +30,9 @@ import org.apache.flink.graph.drivers.parameter.ChoiceParameter;
 import org.apache.flink.graph.drivers.parameter.LongParameter;
 import org.apache.flink.types.CopyableValue;
 
+import org.apache.commons.lang3.text.StrBuilder;
+import org.apache.commons.lang3.text.WordUtils;
+
 import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
 
 /**

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/CSV.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/CSV.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/CSV.java
index 58b65b6..b3f88f6 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/CSV.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/CSV.java
@@ -18,7 +18,6 @@
 
 package org.apache.flink.graph.drivers.input;
 
-import org.apache.commons.lang3.text.WordUtils;
 import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.api.java.io.CsvInputFormat;
 import org.apache.flink.client.program.ProgramParametrizationException;
@@ -33,6 +32,8 @@ import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
 import org.apache.flink.types.StringValue;
 
+import org.apache.commons.lang3.text.WordUtils;
+
 /**
  * Read a {@link Graph} from a CSV file using {@link IntValue},
  * {@link LongValue}, or {@link StringValue} keys.

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/CirculantGraph.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/CirculantGraph.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/CirculantGraph.java
index 14ee816..a5a2540 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/CirculantGraph.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/CirculantGraph.java
@@ -83,7 +83,7 @@ extends GeneratedGraph<LongValue> {
 			ProgramParametrizationException exception = new ProgramParametrizationException("Circulant offset range" +
 				" must use a colon to separate the integer offset and integer length: '" + field + "'");
 
-			if (! field.contains(":")) {
+			if (!field.contains(":")) {
 				throw exception;
 			}
 

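The change above is the ruleset's no-whitespace-after-unary-operator rule at work; the same fix recurs in GridGraph below. In short:

    if (! field.contains(":")) {   // rejected: no whitespace is allowed after unary '!'
    if (!field.contains(":")) {    // accepted
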
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/GeneratedGraph.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/GeneratedGraph.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/GeneratedGraph.java
index 610722c..d4467df 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/GeneratedGraph.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/GeneratedGraph.java
@@ -18,7 +18,6 @@
 
 package org.apache.flink.graph.drivers.input;
 
-import org.apache.commons.lang3.text.WordUtils;
 import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.client.program.ProgramParametrizationException;
 import org.apache.flink.graph.Graph;
@@ -34,6 +33,8 @@ import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
 import org.apache.flink.types.ShortValue;
 
+import org.apache.commons.lang3.text.WordUtils;
+
 /**
  * Base class for generated graphs.
  *
@@ -177,7 +178,7 @@ implements Input<K, NullValue, NullValue> {
 	/**
 	 * Translate {@link LongValue} to {@link ByteValue}.
 	 *
-	 * Throws {@link RuntimeException} for byte overflow.
+	 * <p>Throws {@link RuntimeException} for byte overflow.
 	 */
 	static class LongValueToUnsignedByteValue
 	implements TranslateFunction<LongValue, ByteValue> {
@@ -205,7 +206,7 @@ implements Input<K, NullValue, NullValue> {
 	/**
 	 * Translate {@link LongValue} to {@link Byte}.
 	 *
-	 * Throws {@link RuntimeException} for byte overflow.
+	 * <p>Throws {@link RuntimeException} for byte overflow.
 	 */
 	static class LongValueToUnsignedByte
 	implements TranslateFunction<LongValue, Byte> {
@@ -227,7 +228,7 @@ implements Input<K, NullValue, NullValue> {
 	/**
 	 * Translate {@link LongValue} to {@link ShortValue}.
 	 *
-	 * Throws {@link RuntimeException} for short overflow.
+	 * <p>Throws {@link RuntimeException} for short overflow.
 	 */
 	static class LongValueToUnsignedShortValue
 	implements TranslateFunction<LongValue, ShortValue> {
@@ -255,7 +256,7 @@ implements Input<K, NullValue, NullValue> {
 	/**
 	 * Translate {@link LongValue} to {@link Short}.
 	 *
-	 * Throws {@link RuntimeException} for short overflow.
+	 * <p>Throws {@link RuntimeException} for short overflow.
 	 */
 	static class LongValueToUnsignedShort
 	implements TranslateFunction<LongValue, Short> {
@@ -277,7 +278,7 @@ implements Input<K, NullValue, NullValue> {
 	/**
 	 * Translate {@link LongValue} to {@link CharValue}.
 	 *
-	 * Throws {@link RuntimeException} for char overflow.
+	 * <p>Throws {@link RuntimeException} for char overflow.
 	 */
 	static class LongValueToCharValue
 	implements TranslateFunction<LongValue, CharValue> {
@@ -305,7 +306,7 @@ implements Input<K, NullValue, NullValue> {
 	/**
 	 * Translate {@link LongValue} to {@code Character}.
 	 *
-	 * Throws {@link RuntimeException} for char overflow.
+	 * <p>Throws {@link RuntimeException} for char overflow.
 	 */
 	static class LongValueToChar
 	implements TranslateFunction<LongValue, Character> {
@@ -327,7 +328,7 @@ implements Input<K, NullValue, NullValue> {
 	/**
 	 * Translate {@link LongValue} to {@link Integer}.
 	 *
-	 * Throws {@link RuntimeException} for integer overflow.
+	 * <p>Throws {@link RuntimeException} for integer overflow.
 	 */
 	static class LongValueToUnsignedInt
 	implements TranslateFunction<LongValue, Integer> {

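The translators above all follow the pattern their Javadoc now spells out: map a LongValue vertex id onto a narrower type and fail loudly on overflow. A minimal sketch of that pattern, assuming Gelly's TranslateFunction interface (the class itself is hypothetical, not one of the translators in this commit):

    import org.apache.flink.graph.asm.translate.TranslateFunction;
    import org.apache.flink.types.ByteValue;
    import org.apache.flink.types.LongValue;

    /**
     * Translate {@link LongValue} to {@link ByteValue}.
     *
     * <p>Throws {@link RuntimeException} for byte overflow.
     */
    class LongValueToUnsignedByteValueSketch
    implements TranslateFunction<LongValue, ByteValue> {

        private static final long MAX_VALUE = 1L << 8;   // unsigned byte range

        @Override
        public ByteValue translate(LongValue value, ByteValue reuse) throws Exception {
            long l = value.getValue();

            if (l < 0 || l >= MAX_VALUE) {
                // fail loudly rather than silently wrap
                throw new RuntimeException("Value is out of range for an unsigned byte: " + l);
            }

            if (reuse == null) {
                reuse = new ByteValue();
            }
            reuse.setValue((byte) l);
            return reuse;
        }
    }
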
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/GridGraph.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/GridGraph.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/GridGraph.java
index 2ce3c77..a3aabd9 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/GridGraph.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/GridGraph.java
@@ -134,7 +134,7 @@ extends GeneratedGraph<LongValue> {
 				"a colon to separate the integer size and boolean indicating whether the dimension endpoints are " +
 				"connected: '" + field + "'");
 
-			if (! field.contains(":")) {
+			if (!field.contains(":")) {
 				throw exception;
 			}
 
@@ -147,7 +147,7 @@ extends GeneratedGraph<LongValue> {
 			try {
 				size = Long.parseLong(parts[0]);
 				wrapEndpoints = Boolean.parseBoolean(parts[1]);
-			} catch(NumberFormatException ex) {
+			} catch (NumberFormatException ex) {
 				throw exception;
 			}
 		}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/RMatGraph.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/RMatGraph.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/RMatGraph.java
index adee1eb..3b75089 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/RMatGraph.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/input/RMatGraph.java
@@ -18,7 +18,6 @@
 
 package org.apache.flink.graph.drivers.input;
 
-import org.apache.commons.math3.random.JDKRandomGenerator;
 import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.drivers.parameter.BooleanParameter;
@@ -31,6 +30,8 @@ import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
 import org.apache.flink.types.StringValue;
 
+import org.apache.commons.math3.random.JDKRandomGenerator;
+
 import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
 
 /**

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/ChoiceParameter.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/ChoiceParameter.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/ChoiceParameter.java
index f1b716d..c8033de 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/ChoiceParameter.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/ChoiceParameter.java
@@ -18,12 +18,13 @@
 
 package org.apache.flink.graph.drivers.parameter;
 
-import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.lang3.text.StrBuilder;
 import org.apache.flink.api.java.utils.ParameterTool;
 import org.apache.flink.client.program.ProgramParametrizationException;
 import org.apache.flink.util.Preconditions;
 
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.text.StrBuilder;
+
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/IterationConvergence.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/IterationConvergence.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/IterationConvergence.java
index e9d648a..f02c536 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/IterationConvergence.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/IterationConvergence.java
@@ -26,7 +26,7 @@ import org.apache.flink.graph.drivers.parameter.IterationConvergence.Value;
  * of iterations or a convergence threshold which stops computation when the
  * total change in scores is below a given delta.
  *
- * If the command-line configuration specifies neither a number of iterations
+ * <p>If the command-line configuration specifies neither a number of iterations
  * nor a convergence threshold then a default number of iterations is used
  * with an infinite convergence threshold. Otherwise, when either value is
  * configured then an unset value is set to infinity.

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/Parameter.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/Parameter.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/Parameter.java
index 46785f8..9dbac4b 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/Parameter.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/Parameter.java
@@ -30,9 +30,9 @@ public interface Parameter<T> {
 	/**
 	 * An informal usage string. Parameter names are prefixed with "--".
 	 *
-	 * Optional parameters are enclosed by "[" and "]".
+	 * <p>Optional parameters are enclosed by "[" and "]".
 	 *
-	 * Generic values are represented by all-caps with specific values enclosed
+	 * <p>Generic values are represented by all-caps with specific values enclosed
 	 * by "&lt;" and "&gt;".
 	 *
 	 * @return command-line usage string

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/ParameterizedBase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/ParameterizedBase.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/ParameterizedBase.java
index 5f36ff5..a3991cf 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/ParameterizedBase.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/ParameterizedBase.java
@@ -18,10 +18,11 @@
 
 package org.apache.flink.graph.drivers.parameter;
 
-import org.apache.commons.lang3.text.StrBuilder;
 import org.apache.flink.api.java.utils.ParameterTool;
 import org.apache.flink.client.program.ProgramParametrizationException;
 
+import org.apache.commons.lang3.text.StrBuilder;
+
 import java.util.ArrayList;
 import java.util.List;
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/Simplify.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/Simplify.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/Simplify.java
index 3e9fd9a..4d9e481 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/Simplify.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/drivers/parameter/Simplify.java
@@ -30,12 +30,15 @@ import org.apache.flink.types.NullValue;
  * a directed graph where each undirected edge is represented by a directed
  * edge in each direction.
  *
- * This {@link Parameter} indicates whether to simplify the graph and if the
+ * <p>This {@link Parameter} indicates whether to simplify the graph and if the
  * graph should be directed or undirected.
  */
 public class Simplify
 implements Parameter<Ordering> {
 
+	/**
+	 * Whether and how to simplify the graph.
+	 */
 	public enum Ordering {
 		// leave the graph unchanged
 		NONE,

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/EuclideanGraphWeighing.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/EuclideanGraphWeighing.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/EuclideanGraphWeighing.java
index a7f9198..6380628 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/EuclideanGraphWeighing.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/EuclideanGraphWeighing.java
@@ -18,7 +18,6 @@
 
 package org.apache.flink.graph.examples;
 
-import org.apache.flink.graph.examples.data.EuclideanGraphData;
 import org.apache.flink.api.common.ProgramDescription;
 import org.apache.flink.api.common.functions.MapFunction;
 import org.apache.flink.api.java.DataSet;
@@ -30,19 +29,19 @@ import org.apache.flink.graph.EdgeJoinFunction;
 import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Triplet;
 import org.apache.flink.graph.Vertex;
+import org.apache.flink.graph.examples.data.EuclideanGraphData;
 
 import java.io.Serializable;
 
 /**
  * This example shows how to use Gelly's {@link Graph#getTriplets()} and
  * {@link Graph#joinWithEdges(DataSet, EdgeJoinFunction)} methods.
- * 
- * Given a directed, unweighted graph, with vertex values representing points in a plain,
+ *
+ * <p>Given a directed, unweighted graph, with vertex values representing points in a plain,
  * return a weighted graph where the edge weights are equal to the Euclidean distance between the
  * src and the trg vertex values.
  *
- * <p>
- * Input files are plain text files and must be formatted as follows:
+ * <p>Input files are plain text files and must be formatted as follows:
  * <ul>
  * 	<li> Vertices are represented by their vertexIds and vertex values and are separated by newlines,
  * 	the value being formed of two doubles separated by a comma.
@@ -52,7 +51,7 @@ import java.io.Serializable;
  * 	For example: <code>1,2\n1,3\n</code> defines two edges 1-2 and 1-3.
  * </ul>
  *
- * Usage <code>EuclideanGraphWeighing &lt;vertex path&gt; &lt;edge path&gt; &lt;result path&gt;</code><br>
+ * <p>Usage <code>EuclideanGraphWeighing &lt;vertex path&gt; &lt;edge path&gt; &lt;result path&gt;</code><br>
  * If no parameters are provided, the program is run with default data from
  * {@link EuclideanGraphData}
  */
@@ -137,7 +136,7 @@ public class EuclideanGraphWeighing implements ProgramDescription {
 		}
 
 		public double euclideanDistance(Point other) {
-			return Math.sqrt((x-other.x)*(x-other.x) + (y-other.y)*(y-other.y));
+			return Math.sqrt((x - other.x) * (x - other.x) + (y - other.y) * (y - other.y));
 		}
 
 		@Override

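The euclideanDistance change is the ruleset's whitespace-around-binary-operators rule:

    return Math.sqrt((x-other.x)*(x-other.x));        // rejected: operators need surrounding spaces
    return Math.sqrt((x - other.x) * (x - other.x));  // accepted
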
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/GSAPageRank.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/GSAPageRank.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/GSAPageRank.java
index db2e4f2..4508419 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/GSAPageRank.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/GSAPageRank.java
@@ -35,7 +35,7 @@ import org.apache.flink.types.LongValue;
  * This is an implementation of a simple PageRank algorithm, using a gather-sum-apply iteration.
  * The user can define the damping factor and the maximum number of iterations.
  *
- * The implementation assumes that each page has at least one incoming and one outgoing link.
+ * <p>The implementation assumes that each page has at least one incoming and one outgoing link.
  */
 public class GSAPageRank<K> implements GraphAlgorithm<K, Double, Double, DataSet<Vertex<K, Double>>> {
 
@@ -45,8 +45,8 @@ public class GSAPageRank<K> implements GraphAlgorithm<K, Double, Double, DataSet
 	/**
 	 * Creates an instance of the GSA PageRank algorithm.
 	 *
-	 * The implementation assumes that each page has at least one incoming and one outgoing link.
-	 * 
+	 * <p>The implementation assumes that each page has at least one incoming and one outgoing link.
+	 *
 	 * @param beta the damping factor
 	 * @param maxIterations the maximum number of iterations
 	 */
@@ -81,7 +81,7 @@ public class GSAPageRank<K> implements GraphAlgorithm<K, Double, Double, DataSet
 		public Double gather(Neighbor<Double, Double> neighbor) {
 			double neighborRank = neighbor.getNeighborValue();
 
-			if(getSuperstepNumber() == 1) {
+			if (getSuperstepNumber() == 1) {
 				neighborRank = 1.0 / this.getNumberOfVertices();
 			}
 
@@ -109,7 +109,7 @@ public class GSAPageRank<K> implements GraphAlgorithm<K, Double, Double, DataSet
 
 		@Override
 		public void apply(Double rankSum, Double currentValue) {
-			setResult((1-beta)/this.getNumberOfVertices() + beta * rankSum);
+			setResult((1 - beta) / this.getNumberOfVertices() + beta * rankSum);
 		}
 	}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/GSASingleSourceShortestPaths.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/GSASingleSourceShortestPaths.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/GSASingleSourceShortestPaths.java
index 1cd3549..aa2b7e9 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/GSASingleSourceShortestPaths.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/GSASingleSourceShortestPaths.java
@@ -18,7 +18,6 @@
 
 package org.apache.flink.graph.examples;
 
-import org.apache.flink.graph.examples.data.SingleSourceShortestPathsData;
 import org.apache.flink.api.common.ProgramDescription;
 import org.apache.flink.api.common.functions.MapFunction;
 import org.apache.flink.api.java.DataSet;
@@ -26,26 +25,27 @@ import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.graph.Edge;
 import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
+import org.apache.flink.graph.examples.data.SingleSourceShortestPathsData;
 import org.apache.flink.graph.gsa.ApplyFunction;
 import org.apache.flink.graph.gsa.GatherFunction;
-import org.apache.flink.graph.gsa.SumFunction;
 import org.apache.flink.graph.gsa.Neighbor;
+import org.apache.flink.graph.gsa.SumFunction;
 import org.apache.flink.graph.utils.Tuple3ToEdgeMap;
 
 /**
  * This example shows how to use Gelly's Gather-Sum-Apply iterations.
- * 
- * It is an implementation of the Single-Source-Shortest-Paths algorithm.
+ *
+ * <p>It is an implementation of the Single-Source-Shortest-Paths algorithm.
  * For a scatter-gather implementation of the same algorithm, please refer to {@link SingleSourceShortestPaths}
- * and for a vertex-centric implementation, see {@link PregelSSSP}. 
+ * and for a vertex-centric implementation, see {@link PregelSSSP}.
  *
- * The input file is a plain text file and must be formatted as follows:
+ * <p>The input file is a plain text file and must be formatted as follows:
  * Edges are represented by tuples of srcVertexId, trgVertexId, distance which are
  * separated by tabs. Edges themselves are separated by newlines.
  * For example: <code>1\t2\t0.1\n1\t3\t1.4\n</code> defines two edges,
  * edge 1-2 with distance 0.1, and edge 1-3 with distance 1.4.
  *
- * If no parameters are provided, the program is run with default data from
+ * <p>If no parameters are provided, the program is run with default data from
  * {@link SingleSourceShortestPathsData}
  */
 public class GSASingleSourceShortestPaths implements ProgramDescription {
@@ -56,7 +56,7 @@ public class GSASingleSourceShortestPaths implements ProgramDescription {
 
 	public static void main(String[] args) throws Exception {
 
-		if(!parseParameters(args)) {
+		if (!parseParameters(args)) {
 			return;
 		}
 
@@ -74,7 +74,7 @@ public class GSASingleSourceShortestPaths implements ProgramDescription {
 		DataSet<Vertex<Long, Double>> singleSourceShortestPaths = result.getVertices();
 
 		// emit result
-		if(fileOutput) {
+		if (fileOutput) {
 			singleSourceShortestPaths.writeAsCsv(outputPath, "\n", ",");
 
 			// since file sinks are lazy, we trigger the execution explicitly
@@ -140,7 +140,7 @@ public class GSASingleSourceShortestPaths implements ProgramDescription {
 
 	private static boolean fileOutput = false;
 
-	private static Long srcVertexId = 1l;
+	private static Long srcVertexId = 1L;
 
 	private static String edgesInputPath = null;
 
@@ -151,7 +151,7 @@ public class GSASingleSourceShortestPaths implements ProgramDescription {
 	private static boolean parseParameters(String[] args) {
 
 		if (args.length > 0) {
-			if(args.length != 4) {
+			if (args.length != 4) {
 				System.err.println("Usage: GSASingleSourceShortestPaths <source vertex id>" +
 						" <input edges path> <output path> <num iterations>");
 				return false;

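To make the input contract in the Javadoc above concrete, a tiny tab-separated edges file (vertex ids and distances invented for illustration) could look like this:

	1	2	0.1
	1	3	1.4
	2	3	0.5

This defines edges 1-2 (distance 0.1), 1-3 (distance 1.4) and 2-3 (distance 0.5). The program is then invoked with a source vertex id, the path to this file, an output path and an iteration count, matching the usage string printed by parseParameters.
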
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/IncrementalSSSP.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/IncrementalSSSP.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/IncrementalSSSP.java
index 631384c..197ad68 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/IncrementalSSSP.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/IncrementalSSSP.java
@@ -33,26 +33,26 @@ import org.apache.flink.graph.spargel.ScatterFunction;
 import org.apache.flink.graph.spargel.ScatterGatherConfiguration;
 
 /**
- * This example illustrates how to 
+ * This example illustrates how to
  * <ul>
  *  <li> create a Graph directly from CSV files
  *  <li> use the scatter-gather iteration's messaging direction configuration option
  * </ul>
- * 
- * Incremental Single Sink Shortest Paths Example. Shortest Paths are incrementally updated
+ *
+ * <p>Incremental Single Sink Shortest Paths Example. Shortest Paths are incrementally updated
  * upon edge removal.
  *
- * The program takes as input the resulted graph after a SSSP computation,
- * an edge to be removed and the initial graph(i.e. before SSSP was computed).
+ * <p>The program takes as input the resultant graph after an SSSP computation,
+ * an edge to be removed and the initial graph (i.e. before SSSP was computed).
  * In the following description, SP-graph is used as an abbreviation for
 * the graph resulting from the SSSP computation. We denote the edges that belong to this
  * graph by SP-edges.
- *
- * - If the removed edge does not belong to the SP-graph, no computation is necessary.
- * The edge is simply removed from the graph.
+ * - If the removed edge does not belong to the SP-graph then no computation is necessary
+ * and the edge is simply removed from the graph.
  * - If the removed edge is an SP-edge, then all nodes, whose shortest path contains the removed edge,
  * potentially require re-computation.
- * When the edge {@code <u, v>} is removed, v checks if it has another out-going SP-edge.
+ *
+ * <p>When the edge <code>&lt;u, v&gt;</code> is removed, v checks if it has another out-going SP-edge.
  * If yes, no further computation is required.
  * If v has no other out-going SP-edge, it invalidates its current value, by setting it to INF.
  * Then, it informs all its SP-in-neighbors by sending them an INVALIDATE message.
@@ -61,7 +61,7 @@ import org.apache.flink.graph.spargel.ScatterGatherConfiguration;
  * The propagation stops when a vertex with an alternative shortest path is reached
  * or when we reach a vertex with no SP-in-neighbors.
  *
- * Usage <code>IncrementalSSSP &lt;vertex path&gt; &lt;edge path&gt; &lt;edges in SSSP&gt;
+ * <p>Usage <code>IncrementalSSSP &lt;vertex path&gt; &lt;edge path&gt; &lt;edges in SSSP&gt;
  * &lt;src id edge to be removed&gt; &lt;trg id edge to be removed&gt; &lt;val edge to be removed&gt;
  * &lt;result path&gt; &lt;number of iterations&gt;</code><br>
  * If no parameters are provided, the program is run with default data from
@@ -72,7 +72,7 @@ public class IncrementalSSSP implements ProgramDescription {
 
 	public static void main(String [] args) throws Exception {
 
-		if(!parseParameters(args)) {
+		if (!parseParameters(args)) {
 			return;
 		}
 
@@ -91,7 +91,7 @@ public class IncrementalSSSP implements ProgramDescription {
 		// configure the iteration
 		ScatterGatherConfiguration parameters = new ScatterGatherConfiguration();
 
-		if(isInSSSP(edgeToBeRemoved, ssspGraph.getEdges())) {
+		if (isInSSSP(edgeToBeRemoved, ssspGraph.getEdges())) {
 
 			parameters.setDirection(EdgeDirection.IN);
 			parameters.setOptDegrees(true);
@@ -103,7 +103,7 @@ public class IncrementalSSSP implements ProgramDescription {
 			DataSet<Vertex<Long, Double>> resultedVertices = result.getVertices();
 
 			// Emit results
-			if(fileOutput) {
+			if (fileOutput) {
 				resultedVertices.writeAsCsv(outputPath, "\n", ",");
 				env.execute("Incremental SSSP Example");
 			} else {
@@ -111,7 +111,7 @@ public class IncrementalSSSP implements ProgramDescription {
 			}
 		} else {
 			// print the vertices
-			if(fileOutput) {
+			if (fileOutput) {
 				graph.getVertices().writeAsCsv(outputPath, "\n", ",");
 				env.execute("Incremental SSSP Example");
 			} else {
@@ -147,6 +147,9 @@ public class IncrementalSSSP implements ProgramDescription {
 		}).count() > 0;
 	}
 
+	/**
+	 * Initiate or propagate INVALIDATE messages.
+	 */
 	public static final class InvalidateMessenger extends ScatterFunction<Long, Double, Double, Double> {
 
 		private Edge<Long, Double> edgeToBeRemoved;
@@ -158,23 +161,27 @@ public class IncrementalSSSP implements ProgramDescription {
 		@Override
 		public void sendMessages(Vertex<Long, Double> vertex) throws Exception {
 
-
-			if(getSuperstepNumber() == 1) {
-				if(vertex.getId().equals(edgeToBeRemoved.getSource())) {
+			if (getSuperstepNumber() == 1) {
+				if (vertex.getId().equals(edgeToBeRemoved.getSource())) {
 					// activate the edge target
 					sendMessageTo(edgeToBeRemoved.getSource(), Double.MAX_VALUE);
 				}
 			}
 
-			if(getSuperstepNumber() > 1) {
+			if (getSuperstepNumber() > 1) {
 				// invalidate all edges
-				for(Edge<Long, Double> edge : getEdges()) {
+				for (Edge<Long, Double> edge : getEdges()) {
 					sendMessageTo(edge.getSource(), Double.MAX_VALUE);
 				}
 			}
 		}
 	}
 
+	/**
+	 * When an INVALIDATE message indicates that the only shortest path
+	 * containing this vertex has been removed, then set the vertex distance to
+	 * infinity.
+	 */
 	public static final class VertexDistanceUpdater extends GatherFunction<Long, Double, Double> {
 
 		@Override
@@ -239,7 +246,7 @@ public class IncrementalSSSP implements ProgramDescription {
 	}
 
 	private static Graph<Long, Double, Double> getGraph(ExecutionEnvironment env) {
-		if(fileOutput) {
+		if (fileOutput) {
 			return Graph.fromCsvReader(verticesInputPath, edgesInputPath, env).lineDelimiterEdges("\n")
 					.types(Long.class, Double.class, Double.class);
 		} else {
@@ -248,7 +255,7 @@ public class IncrementalSSSP implements ProgramDescription {
 	}
 
 	private static Graph<Long, Double, Double> getSSSPGraph(ExecutionEnvironment env) {
-		if(fileOutput) {
+		if (fileOutput) {
 			return Graph.fromCsvReader(verticesInputPath, edgesInSSSPInputPath, env).lineDelimiterEdges("\n")
 					.types(Long.class, Double.class, Double.class);
 		} else {

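A short hand trace of the invalidation protocol from the Javadoc above, on an invented SP-graph whose SP-edges are 1->3, 3->5 and 4->5, with 5 as the sink:
- Edge <3, 5> is removed. Vertex 3 has no other out-going SP-edge, so it sets its value to INF and sends an INVALIDATE message to its SP-in-neighbor 1.
- The only shortest path of vertex 1 ran through 3, so it is invalidated as well; it has no SP-in-neighbors, so the propagation stops.
- Vertex 4 still has the SP-edge <4, 5> and keeps its distance unchanged.
This mirrors the behaviour of InvalidateMessenger and VertexDistanceUpdater above; the vertex ids are hypothetical.
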
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/MusicProfiles.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/MusicProfiles.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/MusicProfiles.java
index b7b590d..43a0a9b 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/MusicProfiles.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/MusicProfiles.java
@@ -18,9 +18,6 @@
 
 package org.apache.flink.graph.examples;
 
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.flink.api.common.ProgramDescription;
 import org.apache.flink.api.common.functions.CoGroupFunction;
 import org.apache.flink.api.common.functions.FilterFunction;
@@ -43,6 +40,9 @@ import org.apache.flink.graph.library.LabelPropagation;
 import org.apache.flink.types.NullValue;
 import org.apache.flink.util.Collector;
 
+import java.util.ArrayList;
+import java.util.List;
+
 /**
  * This example demonstrates how to mix the DataSet Flink API with the Gelly API.
 * The input is a set of &lt;userId - songId - playCount&gt; triplets and
@@ -57,14 +57,14 @@ import org.apache.flink.util.Collector;
  * Finally, we use the graph API to run the label propagation community detection algorithm on
  * the similarity graph.
  *
- * The triplets input is expected to be given as one triplet per line,
+ * <p>The triplets input is expected to be given as one triplet per line,
  * in the following format: "&lt;userID&gt;\t&lt;songID&gt;\t&lt;playcount&gt;".
  *
- * The mismatches input file is expected to contain one mismatch record per line,
+ * <p>The mismatches input file is expected to contain one mismatch record per line,
  * in the following format:
  * "ERROR: &lt;songID trackID&gt; song_title"
  *
- * If no arguments are provided, the example runs with default data from {@link MusicProfilesData}.
+ * <p>If no arguments are provided, the example runs with default data from {@link MusicProfilesData}.
  */
 @SuppressWarnings("serial")
 public class MusicProfiles implements ProgramDescription {
@@ -131,7 +131,7 @@ public class MusicProfiles implements ProgramDescription {
 		Graph<String, Long, NullValue> similarUsersGraph = Graph.fromDataSet(similarUsers,
 				new MapFunction<String, Long>() {
 					public Long map(String value) {
-						return 1l;
+						return 1L;
 					}
 				}, env).getUndirected();
 
@@ -167,7 +167,7 @@ public class MusicProfiles implements ProgramDescription {
 
 	}
 
-	public static final class ExtractMismatchSongIds implements MapFunction<String, Tuple1<String>> {
+	private static final class ExtractMismatchSongIds implements MapFunction<String, Tuple1<String>> {
 
 		public Tuple1<String> map(String value) {
 			String[] tokens = value.split("\\s+");
@@ -176,7 +176,7 @@ public class MusicProfiles implements ProgramDescription {
 		}
 	}
 
-	public static final class FilterOutMismatches implements CoGroupFunction<Tuple3<String, String, Integer>,
+	private static final class FilterOutMismatches implements CoGroupFunction<Tuple3<String, String, Integer>,
 		Tuple1<String>, Tuple3<String, String, Integer>> {
 
 		public void coGroup(Iterable<Tuple3<String, String, Integer>> triplets,
@@ -191,13 +191,13 @@ public class MusicProfiles implements ProgramDescription {
 		}
 	}
 
-	public static final class FilterSongNodes implements FilterFunction<Tuple2<String, String>> {
+	private static final class FilterSongNodes implements FilterFunction<Tuple2<String, String>> {
 		public boolean filter(Tuple2<String, String> value) throws Exception {
 			return !value.f1.equals("");
 		}
 	}
 
-	public static final class GetTopSongPerUser	implements EdgesFunctionWithVertexValue<String, NullValue, Integer,
+	private static final class GetTopSongPerUser	implements EdgesFunctionWithVertexValue<String, NullValue, Integer,
 		Tuple2<String, String>> {
 
 		public void iterateEdges(Vertex<String, NullValue> vertex,
@@ -215,7 +215,7 @@ public class MusicProfiles implements ProgramDescription {
 		}
 	}
 
-	public static final class CreateSimilarUserEdges implements GroupReduceFunction<Edge<String, Integer>,
+	private static final class CreateSimilarUserEdges implements GroupReduceFunction<Edge<String, Integer>,
 		Edge<String, NullValue>> {
 
 		public void reduce(Iterable<Edge<String, Integer>> edges, Collector<Edge<String, NullValue>> out) {
@@ -257,8 +257,8 @@ public class MusicProfiles implements ProgramDescription {
 
 	private static boolean parseParameters(String[] args) {
 
-		if(args.length > 0) {
-			if(args.length != 6) {
+		if (args.length > 0) {
+			if (args.length != 6) {
 				System.err.println("Usage: MusicProfiles <input user song triplets path>" +
 						" <input song mismatches path> <output top tracks path> "
 						+ "<playcount threshold> <output communities path> <num iterations>");

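The Javadoc above describes mixing the DataSet API with Gelly. A minimal, self-contained sketch of that hand-off (class name and records are made up; Graph.fromTupleDataSet is the Gelly API call): plain Tuple3 records become the edge set of a Graph, after which graph methods such as groupReduceOnEdges or run(...) are available.

	import org.apache.flink.api.java.DataSet;
	import org.apache.flink.api.java.ExecutionEnvironment;
	import org.apache.flink.api.java.tuple.Tuple3;
	import org.apache.flink.graph.Graph;
	import org.apache.flink.types.NullValue;

	public class TripletsToGraphSketch {
		public static void main(String[] args) throws Exception {
			ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
			// Hypothetical <userId, songId, playCount> triplets.
			DataSet<Tuple3<String, String, Integer>> triplets = env.fromElements(
					Tuple3.of("user_1", "song_23", 10),
					Tuple3.of("user_2", "song_23", 3));
			// Edges keep the play count as their value; vertices get NullValue.
			Graph<String, NullValue, Integer> userSongGraph = Graph.fromTupleDataSet(triplets, env);
			System.out.println(userSongGraph.numberOfVertices());
		}
	}
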
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/PageRank.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/PageRank.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/PageRank.java
index 6be8116..a88f80e 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/PageRank.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/PageRank.java
@@ -35,7 +35,7 @@ import org.apache.flink.types.LongValue;
  * This is an implementation of a simple PageRank algorithm, using a scatter-gather iteration.
  * The user can define the damping factor and the maximum number of iterations.
  *
- * The implementation assumes that each page has at least one incoming and one outgoing link.
+ * <p>The implementation assumes that each page has at least one incoming and one outgoing link.
  */
 public class PageRank<K> implements GraphAlgorithm<K, Double, Double, DataSet<Vertex<K, Double>>> {
 
@@ -45,8 +45,8 @@ public class PageRank<K> implements GraphAlgorithm<K, Double, Double, DataSet<Ve
 	/**
 	 * Creates an instance of the PageRank algorithm.
 	 *
-	 * The implementation assumes that each page has at least one incoming and one outgoing link.
-	 * 
+	 * <p>The implementation assumes that each page has at least one incoming and one outgoing link.
+	 *
 	 * @param beta the damping factor
 	 * @param maxIterations the maximum number of iterations
 	 */

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/PregelSSSP.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/PregelSSSP.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/PregelSSSP.java
index e647653..97a7c5f 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/PregelSSSP.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/PregelSSSP.java
@@ -33,18 +33,18 @@ import org.apache.flink.graph.utils.Tuple3ToEdgeMap;
 
 /**
  * This example shows how to use Gelly's Vertex-Centric iterations.
- * 
- * It is an implementation of the Single-Source-Shortest-Paths algorithm.
+ *
+ * <p>It is an implementation of the Single-Source-Shortest-Paths algorithm.
  * For a scatter-gather implementation of the same algorithm, please refer to {@link SingleSourceShortestPaths}
- * and for a gather-sum-apply implementation see {@link GSASingleSourceShortestPaths}.  
+ * and for a gather-sum-apply implementation see {@link GSASingleSourceShortestPaths}.
  *
- * The input file is a plain text file and must be formatted as follows:
+ * <p>The input file is a plain text file and must be formatted as follows:
  * Edges are represented by tuples of srcVertexId, trgVertexId, distance which are
  * separated by tabs. Edges themselves are separated by newlines.
  * For example: <code>1\t2\t0.1\n1\t3\t1.4\n</code> defines two edges,
  * edge 1-2 with distance 0.1, and edge 1-3 with distance 1.4.
  *
- * If no parameters are provided, the program is run with default data from
+ * <p>If no parameters are provided, the program is run with default data from
  * {@link org.apache.flink.graph.examples.data.SingleSourceShortestPathsData}
  */
 public class PregelSSSP implements ProgramDescription {
@@ -63,7 +63,7 @@ public class PregelSSSP implements ProgramDescription {
 
 		// Execute the vertex-centric iteration
 		Graph<Long, Double, Double> result = graph.runVertexCentricIteration(
-				new SSSPComputeFunction(srcVertexId), new SSSPCombiner(), 
+				new SSSPComputeFunction(srcVertexId), new SSSPCombiner(),
 				maxIterations);
 
 		// Extract the vertices as the result
@@ -86,11 +86,13 @@ public class PregelSSSP implements ProgramDescription {
 	@SuppressWarnings("serial")
 	private static final class InitVertices implements MapFunction<Long, Double> {
 
-		public Double map(Long id) { return Double.POSITIVE_INFINITY; }
+		public Double map(Long id) {
+			return Double.POSITIVE_INFINITY;
+		}
 	}
 
 	/**
-	 * The compute function for SSSP
+	 * The compute function for SSSP.
 	 */
 	@SuppressWarnings("serial")
 	public static final class SSSPComputeFunction extends ComputeFunction<Long, Double, Double, Double> {
@@ -141,7 +143,7 @@ public class PregelSSSP implements ProgramDescription {
 
 	private static boolean fileOutput = false;
 
-	private static Long srcVertexId = 1l;
+	private static Long srcVertexId = 1L;
 
 	private static String edgesInputPath = null;
 
@@ -151,8 +153,8 @@ public class PregelSSSP implements ProgramDescription {
 
 	private static boolean parseParameters(String[] args) {
 
-		if(args.length > 0) {
-			if(args.length != 4) {
+		if (args.length > 0) {
+			if (args.length != 4) {
 				System.err.println("Usage: PregelSSSP <source vertex id>" +
 						" <input edges path> <output path> <num iterations>");
 				return false;
@@ -191,4 +193,4 @@ public class PregelSSSP implements ProgramDescription {
 	public String getDescription() {
 		return "Vertex-centric Single Source Shortest Paths";
 	}
-}
\ No newline at end of file
+}

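For readers new to the vertex-centric model mentioned above: each superstep hands a compute function the vertex and its incoming messages, and the function may update the vertex value and message its neighbors. A stripped-down sketch of an SSSP-style compute function, assumed to mirror (not replace) the SSSPComputeFunction in this file; imports come from org.apache.flink.graph and org.apache.flink.graph.pregel:

	public static final class MinDistanceCompute extends ComputeFunction<Long, Double, Double, Double> {
		private final long srcId = 1L; // assumed source vertex id

		@Override
		public void compute(Vertex<Long, Double> vertex, MessageIterator<Double> messages) {
			double minDistance = vertex.getId().equals(srcId) ? 0.0 : Double.POSITIVE_INFINITY;
			for (Double msg : messages) {
				minDistance = Math.min(minDistance, msg);
			}
			// Only re-broadcast when the vertex value actually improved.
			if (minDistance < vertex.getValue()) {
				setNewVertexValue(minDistance);
				for (Edge<Long, Double> edge : getEdges()) {
					sendMessageTo(edge.getTarget(), minDistance + edge.getValue());
				}
			}
		}
	}
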
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/SingleSourceShortestPaths.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/SingleSourceShortestPaths.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/SingleSourceShortestPaths.java
index 68d20e0..07c6b56 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/SingleSourceShortestPaths.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/SingleSourceShortestPaths.java
@@ -18,7 +18,6 @@
 
 package org.apache.flink.graph.examples;
 
-import org.apache.flink.graph.examples.data.SingleSourceShortestPathsData;
 import org.apache.flink.api.common.ProgramDescription;
 import org.apache.flink.api.common.functions.MapFunction;
 import org.apache.flink.api.java.DataSet;
@@ -26,6 +25,7 @@ import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.graph.Edge;
 import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
+import org.apache.flink.graph.examples.data.SingleSourceShortestPathsData;
 import org.apache.flink.graph.spargel.GatherFunction;
 import org.apache.flink.graph.spargel.MessageIterator;
 import org.apache.flink.graph.spargel.ScatterFunction;
@@ -33,17 +33,17 @@ import org.apache.flink.graph.utils.Tuple3ToEdgeMap;
 
 /**
  * This example shows how to use Gelly's scatter-gather iterations.
- * 
- * It is an implementation of the Single-Source-Shortest-Paths algorithm.
- * For a gather-sum-apply implementation of the same algorithm, please refer to {@link GSASingleSourceShortestPaths}. 
  *
- * The input file is a plain text file and must be formatted as follows:
+ * <p>It is an implementation of the Single-Source-Shortest-Paths algorithm.
+ * For a gather-sum-apply implementation of the same algorithm, please refer to {@link GSASingleSourceShortestPaths}.
+ *
+ * <p>The input file is a plain text file and must be formatted as follows:
  * Edges are represented by tuples of srcVertexId, trgVertexId, distance which are
  * separated by tabs. Edges themselves are separated by newlines.
  * For example: <code>1\t2\t0.1\n1\t3\t1.4\n</code> defines two edges,
  * edge 1-2 with distance 0.1, and edge 1-3 with distance 1.4.
  *
- * If no parameters are provided, the program is run with default data from
+ * <p>If no parameters are provided, the program is run with default data from
  * {@link SingleSourceShortestPathsData}
  */
 public class SingleSourceShortestPaths implements ProgramDescription {
@@ -149,7 +149,7 @@ public class SingleSourceShortestPaths implements ProgramDescription {
 
 	private static boolean fileOutput = false;
 
-	private static Long srcVertexId = 1l;
+	private static Long srcVertexId = 1L;
 
 	private static String edgesInputPath = null;
 
@@ -159,8 +159,8 @@ public class SingleSourceShortestPaths implements ProgramDescription {
 
 	private static boolean parseParameters(String[] args) {
 
-		if(args.length > 0) {
-			if(args.length != 4) {
+		if (args.length > 0) {
+			if (args.length != 4) {
 				System.err.println("Usage: SingleSourceShortestPaths <source vertex id>" +
 						" <input edges path> <output path> <num iterations>");
 				return false;

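In the scatter-gather model used here, the compute step is split in two: a ScatterFunction sends candidate distances along out-edges, and a GatherFunction keeps the minimum of the incoming candidates. A minimal sketch of such a pair, assumed to mirror the scatter and gather functions of this example (not shown in the hunks above); imports come from org.apache.flink.graph and org.apache.flink.graph.spargel:

	public static final class MinDistanceMessenger extends ScatterFunction<Long, Double, Double, Double> {
		@Override
		public void sendMessages(Vertex<Long, Double> vertex) {
			// Only vertices with a finite distance have a candidate to offer.
			if (vertex.getValue() < Double.POSITIVE_INFINITY) {
				for (Edge<Long, Double> edge : getEdges()) {
					sendMessageTo(edge.getTarget(), vertex.getValue() + edge.getValue());
				}
			}
		}
	}

	public static final class VertexDistanceUpdater extends GatherFunction<Long, Double, Double> {
		@Override
		public void updateVertex(Vertex<Long, Double> vertex, MessageIterator<Double> inMessages) {
			double minDistance = Double.MAX_VALUE;
			for (double msg : inMessages) {
				minDistance = Math.min(minDistance, msg);
			}
			if (vertex.getValue() > minDistance) {
				setNewVertexValue(minDistance);
			}
		}
	}
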
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/CommunityDetectionData.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/CommunityDetectionData.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/CommunityDetectionData.java
index d3ddfd8..1d9b257 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/CommunityDetectionData.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/CommunityDetectionData.java
@@ -37,7 +37,7 @@ public class CommunityDetectionData {
 	public static final double DELTA = 0.5f;
 
 	public static final String COMMUNITIES_SINGLE_ITERATION = "1,5\n" + "2,6\n"
-			+ "3,1\n" + "4,1\n" + "5,1\n" + "6,8\n" + "7,8\n" + "8,7"; 
+			+ "3,1\n" + "4,1\n" + "5,1\n" + "6,8\n" + "7,8\n" + "8,7";
 
 	public static final String COMMUNITIES_WITH_TIE = "1,2\n" + "2,1\n" + "3,1\n" + "4,1\n" + "5,1";
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/ConnectedComponentsDefaultData.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/ConnectedComponentsDefaultData.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/ConnectedComponentsDefaultData.java
index d9fb5cc..5b1e88f 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/ConnectedComponentsDefaultData.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/ConnectedComponentsDefaultData.java
@@ -46,7 +46,7 @@ public class ConnectedComponentsDefaultData {
 	public static DataSet<Edge<Long, NullValue>> getDefaultEdgeDataSet(ExecutionEnvironment env) {
 		List<Edge<Long, NullValue>> edgeList = new LinkedList<>();
 		for (Object[] edge : DEFAULT_EDGES) {
-			edgeList.add(new Edge<>((long)edge[0], (long)edge[1], NullValue.getInstance()));
+			edgeList.add(new Edge<>((long) edge[0], (long) edge[1], NullValue.getInstance()));
 		}
 		return env.fromCollection(edgeList);
 	}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/EuclideanGraphData.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/EuclideanGraphData.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/EuclideanGraphData.java
index 2b4277d..1ac9272 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/EuclideanGraphData.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/EuclideanGraphData.java
@@ -41,7 +41,7 @@ public class EuclideanGraphData {
 	public static DataSet<Vertex<Long, EuclideanGraphWeighing.Point>> getDefaultVertexDataSet(ExecutionEnvironment env) {
 
 		List<Vertex<Long, EuclideanGraphWeighing.Point>> vertices = new ArrayList<Vertex<Long, EuclideanGraphWeighing.Point>>();
-		for(int i=1; i<=NUM_VERTICES; i++) {
+		for (int i = 1; i <= NUM_VERTICES; i++) {
 			vertices.add(new Vertex<Long, EuclideanGraphWeighing.Point>(new Long(i),
 					new EuclideanGraphWeighing.Point(new Double(i), new Double(i))));
 		}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/IncrementalSSSPData.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/IncrementalSSSPData.java b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/IncrementalSSSPData.java
index 99e363a..eaa2cde 100644
--- a/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/IncrementalSSSPData.java
+++ b/flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/IncrementalSSSPData.java
@@ -88,7 +88,7 @@ public class IncrementalSSSPData {
 		return new Edge<Long, Double>(2L, 5L, 2.0);
 	}
 
-	public static final String RESULTED_VERTICES = "1," + Double.MAX_VALUE + "\n" + "2," + Double.MAX_VALUE+ "\n"
+	public static final String RESULTED_VERTICES = "1," + Double.MAX_VALUE + "\n" + "2," + Double.MAX_VALUE + "\n"
 			+ "3," + Double.MAX_VALUE + "\n" + "4,1.0\n" + "5,0.0";
 
 	private IncrementalSSSPData() {}


[14/15] flink git commit: [FLINK-6701] Activate strict checkstyle for flink-yarn

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/YarnResourceManager.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnResourceManager.java b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnResourceManager.java
index 63e6a4c..6099d18 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnResourceManager.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnResourceManager.java
@@ -37,75 +37,74 @@ import org.apache.flink.runtime.resourcemanager.exceptions.ResourceManagerExcept
 import org.apache.flink.runtime.resourcemanager.slotmanager.SlotManager;
 import org.apache.flink.runtime.rpc.FatalErrorHandler;
 import org.apache.flink.runtime.rpc.RpcService;
+import org.apache.flink.util.ExceptionUtils;
+
 import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
-import org.apache.hadoop.yarn.api.records.Priority;
-import org.apache.hadoop.yarn.api.records.Resource;
-import org.apache.hadoop.yarn.api.records.ContainerStatus;
 import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
+import org.apache.hadoop.yarn.api.records.ContainerStatus;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.NodeReport;
+import org.apache.hadoop.yarn.api.records.Priority;
+import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.client.api.AMRMClient;
 import org.apache.hadoop.yarn.client.api.NMClient;
 import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import scala.concurrent.duration.FiniteDuration;
-import org.apache.flink.util.ExceptionUtils;
 
-import java.util.Map;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
+import scala.concurrent.duration.FiniteDuration;
+
 /**
  * The yarn implementation of the resource manager. Used when the system is started
  * via the resource framework YARN.
  */
 public class YarnResourceManager extends ResourceManager<ResourceID> implements AMRMClientAsync.CallbackHandler {
-	protected final Logger LOG = LoggerFactory.getLogger(getClass());
 
-	/** The process environment variables */
-	private final Map<String, String> ENV;
+	/** The process environment variables. */
+	private final Map<String, String> env;
 
 	/** The default registration timeout for task executor in seconds. */
-	private final static int DEFAULT_TASK_MANAGER_REGISTRATION_DURATION = 300;
+	private static final int DEFAULT_TASK_MANAGER_REGISTRATION_DURATION = 300;
 
-	/** The heartbeat interval while the resource master is waiting for containers */
+	/** The heartbeat interval while the resource master is waiting for containers. */
 	private static final int FAST_YARN_HEARTBEAT_INTERVAL_MS = 500;
 
-	/** The default heartbeat interval during regular operation */
+	/** The default heartbeat interval during regular operation. */
 	private static final int DEFAULT_YARN_HEARTBEAT_INTERVAL_MS = 5000;
 
-	/** The default memory of task executor to allocate (in MB) */
+	/** The default memory of task executor to allocate (in MB). */
 	private static final int DEFAULT_TSK_EXECUTOR_MEMORY_SIZE = 1024;
 
 	/** Environment variable name of the final container id used by the YarnResourceManager.
 	 * Container ID generation may vary across Hadoop versions. */
-	final static String ENV_FLINK_CONTAINER_ID = "_FLINK_CONTAINER_ID";
-	
+	static final String ENV_FLINK_CONTAINER_ID = "_FLINK_CONTAINER_ID";
+
 	/** Environment variable name of the hostname given by the YARN.
 	 * In the task executor we use the hostnames given by YARN consistently throughout Akka. */
-	final static String ENV_FLINK_NODE_ID = "_FLINK_NODE_ID";
+	static final String ENV_FLINK_NODE_ID = "_FLINK_NODE_ID";
 
-	/** Default heartbeat interval between this resource manager and the YARN ResourceManager */
+	/** Default heartbeat interval between this resource manager and the YARN ResourceManager. */
 	private final int yarnHeartbeatIntervalMillis;
 
 	private final Configuration flinkConfig;
 
 	private final YarnConfiguration yarnConfig;
 
-	/** Client to communicate with the Resource Manager (YARN's master) */
+	/** Client to communicate with the Resource Manager (YARN's master). */
 	private AMRMClientAsync<AMRMClient.ContainerRequest> resourceManagerClient;
 
-	/** Client to communicate with the Node manager and launch TaskExecutor processes */
+	/** Client to communicate with the Node manager and launch TaskExecutor processes. */
 	private NMClient nodeManagerClient;
 
-	/** The number of containers requested, but not yet granted */
+	/** The number of containers requested, but not yet granted. */
 	private int numPendingContainerRequests;
 
-	final private Map<ResourceProfile, Integer> resourcePriorities = new HashMap<>();
+	private final Map<ResourceProfile, Integer> resourcePriorities = new HashMap<>();
 
 	public YarnResourceManager(
 			RpcService rpcService,
@@ -133,7 +132,7 @@ public class YarnResourceManager extends ResourceManager<ResourceID> implements
 			fatalErrorHandler);
 		this.flinkConfig  = flinkConfig;
 		this.yarnConfig = new YarnConfiguration();
-		this.ENV = env;
+		this.env = env;
 		final int yarnHeartbeatIntervalMS = flinkConfig.getInteger(
 				ConfigConstants.YARN_HEARTBEAT_DELAY_SECONDS, DEFAULT_YARN_HEARTBEAT_INTERVAL_MS / 1000) * 1000;
 
@@ -161,7 +160,7 @@ public class YarnResourceManager extends ResourceManager<ResourceID> implements
 			//TODO: the third parameter should be the webmonitor address
 			resourceManagerClient.registerApplicationMaster(hostPort.f0, hostPort.f1, getAddress());
 		} catch (Exception e) {
-			LOG.info("registerApplicationMaster fail", e);
+			log.info("registerApplicationMaster fail", e);
 		}
 
 		// create the client to communicate with the node managers
@@ -204,11 +203,11 @@ public class YarnResourceManager extends ResourceManager<ResourceID> implements
 
 		// first, de-register from YARN
 		FinalApplicationStatus yarnStatus = getYarnStatus(finalStatus);
-		LOG.info("Unregistering application from the YARN Resource Manager");
+		log.info("Unregistering application from the YARN Resource Manager");
 		try {
 			resourceManagerClient.unregisterApplicationMaster(yarnStatus, optionalDiagnostics, "");
 		} catch (Throwable t) {
-			LOG.error("Could not unregister the application master.", t);
+			log.error("Could not unregister the application master.", t);
 		}
 	}
 
@@ -217,8 +216,8 @@ public class YarnResourceManager extends ResourceManager<ResourceID> implements
 		// Priority for worker containers - priorities are intra-application
 		//TODO: set priority according to the resource allocated
 		Priority priority = Priority.newInstance(generatePriority(resourceProfile));
-		int mem = resourceProfile.getMemoryInMB() < 0 ? DEFAULT_TSK_EXECUTOR_MEMORY_SIZE : (int)resourceProfile.getMemoryInMB();
-		int vcore = resourceProfile.getCpuCores() < 1 ? 1 : (int)resourceProfile.getCpuCores();
+		int mem = resourceProfile.getMemoryInMB() < 0 ? DEFAULT_TSK_EXECUTOR_MEMORY_SIZE : (int) resourceProfile.getMemoryInMB();
+		int vcore = resourceProfile.getCpuCores() < 1 ? 1 : (int) resourceProfile.getCpuCores();
 		Resource capability = Resource.newInstance(mem, vcore);
 		requestYarnContainer(capability, priority);
 	}
@@ -254,7 +253,7 @@ public class YarnResourceManager extends ResourceManager<ResourceID> implements
 	public void onContainersAllocated(List<Container> containers) {
 		for (Container container : containers) {
 			numPendingContainerRequests = Math.max(0, numPendingContainerRequests - 1);
-			LOG.info("Received new container: {} - Remaining pending container requests: {}",
+			log.info("Received new container: {} - Remaining pending container requests: {}",
 					container.getId(), numPendingContainerRequests);
 			try {
 				/** Context information used to start a TaskExecutor Java process */
@@ -264,7 +263,7 @@ public class YarnResourceManager extends ResourceManager<ResourceID> implements
 			}
 			catch (Throwable t) {
 				// failed to launch the container, will release the failed one and ask for a new one
-				LOG.error("Could not start TaskManager in container {},", container, t);
+				log.error("Could not start TaskManager in container {},", container, t);
 				resourceManagerClient.releaseAssignedContainer(container.getId());
 				requestYarnContainer(container.getResource(), container.getPriority());
 			}
@@ -279,7 +278,7 @@ public class YarnResourceManager extends ResourceManager<ResourceID> implements
 		try {
 			shutDown();
 		} catch (Exception e) {
-			LOG.warn("Fail to shutdown the YARN resource manager.", e);
+			log.warn("Fail to shutdown the YARN resource manager.", e);
 		}
 	}
 
@@ -317,7 +316,7 @@ public class YarnResourceManager extends ResourceManager<ResourceID> implements
 		}
 	}
 
-	// parse the host and port from akka address, 
+	// parse the host and port from akka address,
 	// the akka address is like akka.tcp://flink@100.81.153.180:49712/user/$a
 	private static Tuple2<String, Integer> parseHostPort(String address) {
 		String[] hostPort = address.split("@")[1].split(":");
@@ -333,35 +332,35 @@ public class YarnResourceManager extends ResourceManager<ResourceID> implements
 		resourceManagerClient.setHeartbeatInterval(FAST_YARN_HEARTBEAT_INTERVAL_MS);
 
 		numPendingContainerRequests++;
-		LOG.info("Requesting new TaskManager container pending requests: {}",
+		log.info("Requesting new TaskManager container pending requests: {}",
 				numPendingContainerRequests);
 	}
 
 	private ContainerLaunchContext createTaskExecutorLaunchContext(Resource resource, String containerId, String host)
 			throws Exception {
 		// init the ContainerLaunchContext
-		final String currDir = ENV.get(ApplicationConstants.Environment.PWD.key());
+		final String currDir = env.get(ApplicationConstants.Environment.PWD.key());
 
 		final ContaineredTaskManagerParameters taskManagerParameters =
 				ContaineredTaskManagerParameters.create(flinkConfig, resource.getMemory(), 1);
 
-		LOG.info("TaskExecutor{} will be started with container size {} MB, JVM heap size {} MB, " +
+		log.info("TaskExecutor{} will be started with container size {} MB, JVM heap size {} MB, " +
 				"JVM direct memory limit {} MB",
 				containerId,
 				taskManagerParameters.taskManagerTotalMemoryMB(),
 				taskManagerParameters.taskManagerHeapSizeMB(),
 				taskManagerParameters.taskManagerDirectMemoryLimitMB());
-		int timeout = flinkConfig.getInteger(ConfigConstants.TASK_MANAGER_MAX_REGISTRATION_DURATION, 
+		int timeout = flinkConfig.getInteger(ConfigConstants.TASK_MANAGER_MAX_REGISTRATION_DURATION,
 				DEFAULT_TASK_MANAGER_REGISTRATION_DURATION);
 		FiniteDuration teRegistrationTimeout = new FiniteDuration(timeout, TimeUnit.SECONDS);
 		final Configuration taskManagerConfig = BootstrapTools.generateTaskManagerConfiguration(
 				flinkConfig, "", 0, 1, teRegistrationTimeout);
-		LOG.debug("TaskManager configuration: {}", taskManagerConfig);
+		log.debug("TaskManager configuration: {}", taskManagerConfig);
 
 		ContainerLaunchContext taskExecutorLaunchContext = Utils.createTaskExecutorContext(
-				flinkConfig, yarnConfig, ENV,
+				flinkConfig, yarnConfig, env,
 				taskManagerParameters, taskManagerConfig,
-				currDir, YarnTaskExecutorRunner.class, LOG);
+				currDir, YarnTaskExecutorRunner.class, log);
 
 		// set a special environment variable to uniquely identify this container
 		taskExecutorLaunchContext.getEnvironment()
@@ -373,7 +372,6 @@ public class YarnResourceManager extends ResourceManager<ResourceID> implements
 
 
 
-	
 	/**
 	 * Generates a priority for the given resource profile.
 	 * The priority is only used to distinguish requests for different resources.

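The startNewWorker logic above clamps the requested profile before asking YARN for a container: a negative memory value falls back to the DEFAULT_TSK_EXECUTOR_MEMORY_SIZE of 1024 MB, and CPU requests below one core are raised to a single vcore (larger fractional values are truncated). An illustration with invented numbers:

	int requestedMemoryMB = -1;   // profile did not specify memory (hypothetical)
	double requestedCores = 0.5;  // fractional CPU request (hypothetical)
	int mem = requestedMemoryMB < 0 ? 1024 : requestedMemoryMB;  // -> 1024 MB default
	int vcore = requestedCores < 1 ? 1 : (int) requestedCores;   // -> 1 vcore
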
http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/YarnResourceManagerCallbackHandler.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnResourceManagerCallbackHandler.java b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnResourceManagerCallbackHandler.java
index 2372cbc..62729a4 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnResourceManagerCallbackHandler.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnResourceManagerCallbackHandler.java
@@ -18,11 +18,11 @@
 
 package org.apache.flink.yarn;
 
-import akka.actor.ActorRef;
 import org.apache.flink.runtime.clusterframework.messages.FatalErrorOccurred;
 import org.apache.flink.yarn.messages.ContainersAllocated;
 import org.apache.flink.yarn.messages.ContainersComplete;
 
+import akka.actor.ActorRef;
 import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerStatus;
 import org.apache.hadoop.yarn.api.records.NodeReport;
@@ -37,16 +37,16 @@ import java.util.List;
  */
 public class YarnResourceManagerCallbackHandler implements AMRMClientAsync.CallbackHandler {
 
-	/** The yarn master to which we report the callbacks */
+	/** The yarn master to which we report the callbacks. */
 	private ActorRef yarnFrameworkMaster;
 
-	/** The progress we report */
+	/** The progress we report. */
 	private float currentProgress;
 
 	public YarnResourceManagerCallbackHandler() {
 		this(null);
 	}
-	
+
 	public YarnResourceManagerCallbackHandler(ActorRef yarnFrameworkMaster) {
 		this.yarnFrameworkMaster = yarnFrameworkMaster;
 	}

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/YarnTaskExecutorRunner.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnTaskExecutorRunner.java b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnTaskExecutorRunner.java
index 398a5eb..2ed4c1d 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnTaskExecutorRunner.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnTaskExecutorRunner.java
@@ -37,6 +37,7 @@ import org.apache.flink.runtime.util.EnvironmentInformation;
 import org.apache.flink.runtime.util.JvmShutdownSafeguard;
 import org.apache.flink.runtime.util.SignalHandler;
 import org.apache.flink.util.Preconditions;
+
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
@@ -52,13 +53,12 @@ import java.util.concurrent.Callable;
  */
 public class YarnTaskExecutorRunner {
 
-	/** Logger */
 	protected static final Logger LOG = LoggerFactory.getLogger(YarnTaskExecutorRunner.class);
 
-	/** The process environment variables */
+	/** The process environment variables. */
 	private static final Map<String, String> ENV = System.getenv();
 
-	/** The exit code returned if the initialization of the yarn task executor runner failed */
+	/** The exit code returned if the initialization of the yarn task executor runner failed. */
 	private static final int INIT_ERROR_EXIT_CODE = 31;
 
 	private MetricRegistry metricRegistry;
@@ -131,7 +131,7 @@ public class YarnTaskExecutorRunner {
 			configuration.setBoolean(AkkaOptions.JVM_EXIT_ON_FATAL_ERROR, true);
 
 			String keytabPath = null;
-			if(remoteKeytabPath != null) {
+			if (remoteKeytabPath != null) {
 				File f = new File(currDir, Utils.KEYTAB_FILE_NAME);
 				keytabPath = f.getAbsolutePath();
 				LOG.info("keytab path: {}", keytabPath);
@@ -252,7 +252,6 @@ public class YarnTaskExecutorRunner {
 	//  Utilities
 	// ------------------------------------------------------------------------
 
-
 	protected void shutdown() {
 			if (taskExecutorRpcService != null) {
 				try {

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/YarnTaskManagerRunner.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnTaskManagerRunner.java b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnTaskManagerRunner.java
index 047a1fa..265c5a6 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnTaskManagerRunner.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnTaskManagerRunner.java
@@ -18,11 +18,6 @@
 
 package org.apache.flink.yarn;
 
-import java.io.File;
-import java.io.IOException;
-import java.util.Map;
-import java.util.concurrent.Callable;
-
 import org.apache.flink.configuration.AkkaOptions;
 import org.apache.flink.configuration.ConfigConstants;
 import org.apache.flink.configuration.Configuration;
@@ -31,17 +26,21 @@ import org.apache.flink.runtime.clusterframework.types.ResourceID;
 import org.apache.flink.runtime.security.SecurityUtils;
 import org.apache.flink.runtime.taskmanager.TaskManager;
 import org.apache.flink.runtime.util.EnvironmentInformation;
-
 import org.apache.flink.runtime.util.JvmShutdownSafeguard;
 import org.apache.flink.runtime.util.SignalHandler;
 import org.apache.flink.util.Preconditions;
+
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.File;
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.Callable;
+
 /**
  * The entry point for running a TaskManager in a YARN container.
  */
@@ -95,7 +94,7 @@ public class YarnTaskManagerRunner {
 		configuration.setBoolean(AkkaOptions.JVM_EXIT_ON_FATAL_ERROR, true);
 
 		String localKeytabPath = null;
-		if(remoteKeytabPath != null) {
+		if (remoteKeytabPath != null) {
 			File f = new File(currDir, Utils.KEYTAB_FILE_NAME);
 			localKeytabPath = f.getAbsolutePath();
 			LOG.info("localKeytabPath: {}", localKeytabPath);
@@ -104,7 +103,7 @@ public class YarnTaskManagerRunner {
 		UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
 
 		LOG.info("YARN daemon is running as: {} Yarn client user obtainer: {}",
-				currentUser.getShortUserName(), yarnClientUsername );
+				currentUser.getShortUserName(), yarnClientUsername);
 
 		// Infer the resource identifier from the environment variable
 		String containerID = Preconditions.checkNotNull(envs.get(YarnFlinkResourceManager.ENV_FLINK_CONTAINER_ID));
@@ -153,7 +152,7 @@ public class YarnTaskManagerRunner {
 					return null;
 				}
 			});
-		} catch(Exception e) {
+		} catch (Exception e) {
 			LOG.error("Exception occurred while launching Task Manager", e);
 			throw new RuntimeException(e);
 		}

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/cli/FlinkYarnCLI.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/cli/FlinkYarnCLI.java b/flink-yarn/src/main/java/org/apache/flink/yarn/cli/FlinkYarnCLI.java
index 6ce8d17..aaa9bac 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/cli/FlinkYarnCLI.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/cli/FlinkYarnCLI.java
@@ -15,19 +15,21 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.flink.yarn.cli;
 
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
-import org.apache.commons.lang3.StringUtils;
 import org.apache.flink.client.cli.CliFrontendParser;
 import org.apache.flink.client.cli.CustomCommandLine;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.util.Preconditions;
 import org.apache.flink.yarn.YarnClusterClientV2;
 import org.apache.flink.yarn.YarnClusterDescriptorV2;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.fs.Path;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -48,27 +50,27 @@ import static org.apache.flink.client.cli.CliFrontendParser.ADDRESS_OPTION;
 public class FlinkYarnCLI implements CustomCommandLine<YarnClusterClientV2> {
 	private static final Logger LOG = LoggerFactory.getLogger(FlinkYarnCLI.class);
 
-	/** The id for the CommandLine interface */
+	/** The id for the CommandLine interface. */
 	private static final String ID = "yarn";
 
 	private static final String YARN_DYNAMIC_PROPERTIES_SEPARATOR = "@@"; // this has to be a regex for String.split()
 
 	//------------------------------------ Command Line argument options -------------------------
 	// the prefix transformation is used by the CliFrontend static constructor.
-	private final Option QUEUE;
-	private final Option SHIP_PATH;
-	private final Option FLINK_JAR;
-	private final Option JM_MEMORY;
-	private final Option DETACHED;
-	private final Option ZOOKEEPER_NAMESPACE;
+	private final Option queue;
+	private final Option shipPath;
+	private final Option flinkJar;
+	private final Option jmMemory;
+	private final Option detached;
+	private final Option zookeeperNamespace;
 
-	private final Options ALL_OPTIONS;
+	private final Options allOptions;
 
 	/**
 	 * Dynamic properties allow the user to specify additional configuration values with -D, such as
-	 * <tt> -Dfs.overwrite-files=true  -Dtaskmanager.network.memory.min=536346624</tt>
+	 * <tt> -Dfs.overwrite-files=true  -Dtaskmanager.network.memory.min=536346624</tt>.
 	 */
-	private final Option DYNAMIC_PROPERTIES;
+	private final Option dynamicProperties;
 
 	//------------------------------------ Internal fields -------------------------
 	// use detach mode as default
@@ -76,22 +78,22 @@ public class FlinkYarnCLI implements CustomCommandLine<YarnClusterClientV2> {
 
 	public FlinkYarnCLI(String shortPrefix, String longPrefix) {
 
-		QUEUE = new Option(shortPrefix + "qu", longPrefix + "queue", true, "Specify YARN queue.");
-		SHIP_PATH = new Option(shortPrefix + "t", longPrefix + "ship", true, "Ship files in the specified directory (t for transfer)");
-		FLINK_JAR = new Option(shortPrefix + "j", longPrefix + "jar", true, "Path to Flink jar file");
-		JM_MEMORY = new Option(shortPrefix + "jm", longPrefix + "jobManagerMemory", true, "Memory for JobManager Container [in MB]");
-		DYNAMIC_PROPERTIES = new Option(shortPrefix + "D", true, "Dynamic properties");
-		DETACHED = new Option(shortPrefix + "a", longPrefix + "attached", false, "Start attached");
-		ZOOKEEPER_NAMESPACE = new Option(shortPrefix + "z", longPrefix + "zookeeperNamespace", true, "Namespace to create the Zookeeper sub-paths for high availability mode");
-
-		ALL_OPTIONS = new Options();
-		ALL_OPTIONS.addOption(FLINK_JAR);
-		ALL_OPTIONS.addOption(JM_MEMORY);
-		ALL_OPTIONS.addOption(QUEUE);
-		ALL_OPTIONS.addOption(SHIP_PATH);
-		ALL_OPTIONS.addOption(DYNAMIC_PROPERTIES);
-		ALL_OPTIONS.addOption(DETACHED);
-		ALL_OPTIONS.addOption(ZOOKEEPER_NAMESPACE);
+		queue = new Option(shortPrefix + "qu", longPrefix + "queue", true, "Specify YARN queue.");
+		shipPath = new Option(shortPrefix + "t", longPrefix + "ship", true, "Ship files in the specified directory (t for transfer)");
+		flinkJar = new Option(shortPrefix + "j", longPrefix + "jar", true, "Path to Flink jar file");
+		jmMemory = new Option(shortPrefix + "jm", longPrefix + "jobManagerMemory", true, "Memory for JobManager Container [in MB]");
+		dynamicProperties = new Option(shortPrefix + "D", true, "Dynamic properties");
+		detached = new Option(shortPrefix + "a", longPrefix + "attached", false, "Start attached");
+		zookeeperNamespace = new Option(shortPrefix + "z", longPrefix + "zookeeperNamespace", true, "Namespace to create the Zookeeper sub-paths for high availability mode");
+
+		allOptions = new Options();
+		allOptions.addOption(flinkJar);
+		allOptions.addOption(jmMemory);
+		allOptions.addOption(queue);
+		allOptions.addOption(shipPath);
+		allOptions.addOption(dynamicProperties);
+		allOptions.addOption(detached);
+		allOptions.addOption(zookeeperNamespace);
 	}
 
 	public YarnClusterDescriptorV2 createDescriptor(String defaultApplicationName, CommandLine cmd) {
@@ -100,8 +102,8 @@ public class FlinkYarnCLI implements CustomCommandLine<YarnClusterClientV2> {
 
 		// Jar Path
 		Path localJarPath;
-		if (cmd.hasOption(FLINK_JAR.getOpt())) {
-			String userPath = cmd.getOptionValue(FLINK_JAR.getOpt());
+		if (cmd.hasOption(flinkJar.getOpt())) {
+			String userPath = cmd.getOptionValue(flinkJar.getOpt());
 			if (!userPath.startsWith("file://")) {
 				userPath = "file://" + userPath;
 			}
@@ -117,7 +119,7 @@ public class FlinkYarnCLI implements CustomCommandLine<YarnClusterClientV2> {
 				localJarPath = new Path(new File(decodedPath).toURI());
 			} catch (UnsupportedEncodingException e) {
 				throw new RuntimeException("Couldn't decode the encoded Flink dist jar path: " + encodedJarPath +
-					" Please supply a path manually via the -" + FLINK_JAR.getOpt() + " option.");
+					" Please supply a path manually via the -" + flinkJar.getOpt() + " option.");
 			}
 		}
 
@@ -125,8 +127,8 @@ public class FlinkYarnCLI implements CustomCommandLine<YarnClusterClientV2> {
 
 		List<File> shipFiles = new ArrayList<>();
 		// path to directory to ship
-		if (cmd.hasOption(SHIP_PATH.getOpt())) {
-			String shipPath = cmd.getOptionValue(SHIP_PATH.getOpt());
+		if (cmd.hasOption(shipPath.getOpt())) {
+			String shipPath = cmd.getOptionValue(this.shipPath.getOpt());
 			File shipDir = new File(shipPath);
 			if (shipDir.isDirectory()) {
 				shipFiles.add(shipDir);
@@ -138,36 +140,36 @@ public class FlinkYarnCLI implements CustomCommandLine<YarnClusterClientV2> {
 		yarnClusterDescriptor.addShipFiles(shipFiles);
 
 		// queue
-		if (cmd.hasOption(QUEUE.getOpt())) {
-			yarnClusterDescriptor.setQueue(cmd.getOptionValue(QUEUE.getOpt()));
+		if (cmd.hasOption(queue.getOpt())) {
+			yarnClusterDescriptor.setQueue(cmd.getOptionValue(queue.getOpt()));
 		}
 
 		// JobManager Memory
-		if (cmd.hasOption(JM_MEMORY.getOpt())) {
-			int jmMemory = Integer.valueOf(cmd.getOptionValue(JM_MEMORY.getOpt()));
+		if (cmd.hasOption(jmMemory.getOpt())) {
+			int jmMemory = Integer.valueOf(cmd.getOptionValue(this.jmMemory.getOpt()));
 			yarnClusterDescriptor.setJobManagerMemory(jmMemory);
 		}
 
 		String[] dynamicProperties = null;
-		if (cmd.hasOption(DYNAMIC_PROPERTIES.getOpt())) {
-			dynamicProperties = cmd.getOptionValues(DYNAMIC_PROPERTIES.getOpt());
+		if (cmd.hasOption(this.dynamicProperties.getOpt())) {
+			dynamicProperties = cmd.getOptionValues(this.dynamicProperties.getOpt());
 		}
 		String dynamicPropertiesEncoded = StringUtils.join(dynamicProperties, YARN_DYNAMIC_PROPERTIES_SEPARATOR);
 
 		yarnClusterDescriptor.setDynamicPropertiesEncoded(dynamicPropertiesEncoded);
 
-		if (cmd.hasOption(DETACHED.getOpt()) || cmd.hasOption(CliFrontendParser.DETACHED_OPTION.getOpt())) {
+		if (cmd.hasOption(detached.getOpt()) || cmd.hasOption(CliFrontendParser.DETACHED_OPTION.getOpt())) {
 			// TODO: non-detached mode is not supported yet.
 			//this.detachedMode = false;
 		}
 		yarnClusterDescriptor.setDetachedMode(this.detachedMode);
 
-		if(defaultApplicationName != null) {
+		if (defaultApplicationName != null) {
 			yarnClusterDescriptor.setName(defaultApplicationName);
 		}
 
-		if (cmd.hasOption(ZOOKEEPER_NAMESPACE.getOpt())) {
-			String zookeeperNamespace = cmd.getOptionValue(ZOOKEEPER_NAMESPACE.getOpt());
+		if (cmd.hasOption(zookeeperNamespace.getOpt())) {
+			String zookeeperNamespace = cmd.getOptionValue(this.zookeeperNamespace.getOpt());
 			yarnClusterDescriptor.setZookeeperNamespace(zookeeperNamespace);
 		}
 
@@ -201,7 +203,7 @@ public class FlinkYarnCLI implements CustomCommandLine<YarnClusterClientV2> {
 
 	@Override
 	public void addRunOptions(Options baseOptions) {
-		for (Object option : ALL_OPTIONS.getOptions()) {
+		for (Object option : allOptions.getOptions()) {
 			baseOptions.addOption((Option) option);
 		}
 	}
@@ -233,9 +235,6 @@ public class FlinkYarnCLI implements CustomCommandLine<YarnClusterClientV2> {
 		return new YarnClusterClientV2(yarnClusterDescriptor, config);
 	}
 
-	/**
-	 * Utility method
-	 */
 	private void logAndSysout(String message) {
 		LOG.info(message);
 		System.out.println(message);

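The dynamic-properties handling above joins every -D value with the "@@" separator before handing one encoded string to the cluster descriptor, so repeated flags survive as a single value. A quick illustration reusing the values from the option's own Javadoc (commons-lang3 StringUtils, as in createDescriptor):

	String[] props = {"fs.overwrite-files=true", "taskmanager.network.memory.min=536346624"};
	String encoded = StringUtils.join(props, "@@");
	// encoded == "fs.overwrite-files=true@@taskmanager.network.memory.min=536346624"
	// "@@" must remain a valid regex, because the receiving side splits it with String.split().
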
http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/cli/FlinkYarnSessionCli.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/cli/FlinkYarnSessionCli.java b/flink-yarn/src/main/java/org/apache/flink/yarn/cli/FlinkYarnSessionCli.java
index 53253d6..f15314a 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/cli/FlinkYarnSessionCli.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/cli/FlinkYarnSessionCli.java
@@ -15,15 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.flink.yarn.cli;
 
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.PosixParser;
-import org.apache.commons.lang3.StringUtils;
 import org.apache.flink.client.cli.CliFrontendParser;
 import org.apache.flink.client.cli.CustomCommandLine;
 import org.apache.flink.configuration.ConfigConstants;
@@ -38,6 +32,14 @@ import org.apache.flink.util.Preconditions;
 import org.apache.flink.yarn.AbstractYarnClusterDescriptor;
 import org.apache.flink.yarn.YarnClusterClient;
 import org.apache.flink.yarn.YarnClusterDescriptor;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.PosixParser;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.slf4j.Logger;
@@ -79,7 +81,7 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 
 	private static final int CLIENT_POLLING_INTERVALL = 3;
 
-	/** The id for the CommandLine interface */
+	/** The id for the CommandLine interface. */
 	private static final String ID = "yarn-cluster";
 
 	// YARN-session related constants
@@ -92,19 +94,19 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 
 	//------------------------------------ Command Line argument options -------------------------
 	// the prefix transformation is used by the CliFrontend static constructor.
-	private final Option QUERY;
+	private final Option query;
 	// --- or ---
-	private final Option APPLICATION_ID;
+	private final Option applicationId;
 	// --- or ---
-	private final Option QUEUE;
-	private final Option SHIP_PATH;
-	private final Option FLINK_JAR;
-	private final Option JM_MEMORY;
-	private final Option TM_MEMORY;
-	private final Option CONTAINER;
-	private final Option SLOTS;
-	private final Option DETACHED;
-	private final Option ZOOKEEPER_NAMESPACE;
+	private final Option queue;
+	private final Option shipPath;
+	private final Option flinkJar;
+	private final Option jmMemory;
+	private final Option tmMemory;
+	private final Option container;
+	private final Option slots;
+	private final Option detached;
+	private final Option zookeeperNamespace;
 
 	/**
 	 * @deprecated Streaming mode has been deprecated without replacement. Set the
@@ -112,16 +114,16 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 	 * key to true to get the previous batch mode behaviour.
 	 */
 	@Deprecated
-	private final Option STREAMING;
-	private final Option NAME;
+	private final Option streaming;
+	private final Option name;
 
-	private final Options ALL_OPTIONS;
+	private final Options allOptions;
 
 	/**
 	 * Dynamic properties allow the user to specify additional configuration values with -D, such as
-	 * <tt> -Dfs.overwrite-files=true  -Dtaskmanager.network.memory.min=536346624</tt>
+	 * <tt> -Dfs.overwrite-files=true  -Dtaskmanager.network.memory.min=536346624</tt>.
 	 */
-	private final Option DYNAMIC_PROPERTIES;
+	private final Option dynamicproperties;
 
 	private final boolean acceptInteractiveInput;
 
@@ -136,41 +138,40 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 	public FlinkYarnSessionCli(String shortPrefix, String longPrefix, boolean acceptInteractiveInput) {
 		this.acceptInteractiveInput = acceptInteractiveInput;
 
-		QUERY = new Option(shortPrefix + "q", longPrefix + "query", false, "Display available YARN resources (memory, cores)");
-		APPLICATION_ID = new Option(shortPrefix + "id", longPrefix + "applicationId", true, "Attach to running YARN session");
-		QUEUE = new Option(shortPrefix + "qu", longPrefix + "queue", true, "Specify YARN queue.");
-		SHIP_PATH = new Option(shortPrefix + "t", longPrefix + "ship", true, "Ship files in the specified directory (t for transfer)");
-		FLINK_JAR = new Option(shortPrefix + "j", longPrefix + "jar", true, "Path to Flink jar file");
-		JM_MEMORY = new Option(shortPrefix + "jm", longPrefix + "jobManagerMemory", true, "Memory for JobManager Container [in MB]");
-		TM_MEMORY = new Option(shortPrefix + "tm", longPrefix + "taskManagerMemory", true, "Memory per TaskManager Container [in MB]");
-		CONTAINER = new Option(shortPrefix + "n", longPrefix + "container", true, "Number of YARN container to allocate (=Number of Task Managers)");
-		SLOTS = new Option(shortPrefix + "s", longPrefix + "slots", true, "Number of slots per TaskManager");
-		DYNAMIC_PROPERTIES = new Option(shortPrefix + "D", true, "Dynamic properties");
-		DETACHED = new Option(shortPrefix + "d", longPrefix + "detached", false, "Start detached");
-		STREAMING = new Option(shortPrefix + "st", longPrefix + "streaming", false, "Start Flink in streaming mode");
-		NAME = new Option(shortPrefix + "nm", longPrefix + "name", true, "Set a custom name for the application on YARN");
-		ZOOKEEPER_NAMESPACE = new Option(shortPrefix + "z", longPrefix + "zookeeperNamespace", true, "Namespace to create the Zookeeper sub-paths for high availability mode");
-
-		ALL_OPTIONS = new Options();
-		ALL_OPTIONS.addOption(FLINK_JAR);
-		ALL_OPTIONS.addOption(JM_MEMORY);
-		ALL_OPTIONS.addOption(TM_MEMORY);
-		ALL_OPTIONS.addOption(CONTAINER);
-		ALL_OPTIONS.addOption(QUEUE);
-		ALL_OPTIONS.addOption(QUERY);
-		ALL_OPTIONS.addOption(SHIP_PATH);
-		ALL_OPTIONS.addOption(SLOTS);
-		ALL_OPTIONS.addOption(DYNAMIC_PROPERTIES);
-		ALL_OPTIONS.addOption(DETACHED);
-		ALL_OPTIONS.addOption(STREAMING);
-		ALL_OPTIONS.addOption(NAME);
-		ALL_OPTIONS.addOption(APPLICATION_ID);
-		ALL_OPTIONS.addOption(ZOOKEEPER_NAMESPACE);
+		query = new Option(shortPrefix + "q", longPrefix + "query", false, "Display available YARN resources (memory, cores)");
+		applicationId = new Option(shortPrefix + "id", longPrefix + "applicationId", true, "Attach to running YARN session");
+		queue = new Option(shortPrefix + "qu", longPrefix + "queue", true, "Specify YARN queue.");
+		shipPath = new Option(shortPrefix + "t", longPrefix + "ship", true, "Ship files in the specified directory (t for transfer)");
+		flinkJar = new Option(shortPrefix + "j", longPrefix + "jar", true, "Path to Flink jar file");
+		jmMemory = new Option(shortPrefix + "jm", longPrefix + "jobManagerMemory", true, "Memory for JobManager Container [in MB]");
+		tmMemory = new Option(shortPrefix + "tm", longPrefix + "taskManagerMemory", true, "Memory per TaskManager Container [in MB]");
+		container = new Option(shortPrefix + "n", longPrefix + "container", true, "Number of YARN containers to allocate (=Number of Task Managers)");
+		slots = new Option(shortPrefix + "s", longPrefix + "slots", true, "Number of slots per TaskManager");
+		dynamicproperties = new Option(shortPrefix + "D", true, "Dynamic properties");
+		detached = new Option(shortPrefix + "d", longPrefix + "detached", false, "Start detached");
+		streaming = new Option(shortPrefix + "st", longPrefix + "streaming", false, "Start Flink in streaming mode");
+		name = new Option(shortPrefix + "nm", longPrefix + "name", true, "Set a custom name for the application on YARN");
+		zookeeperNamespace = new Option(shortPrefix + "z", longPrefix + "zookeeperNamespace", true, "Namespace to create the Zookeeper sub-paths for high availability mode");
+
+		allOptions = new Options();
+		allOptions.addOption(flinkJar);
+		allOptions.addOption(jmMemory);
+		allOptions.addOption(tmMemory);
+		allOptions.addOption(container);
+		allOptions.addOption(queue);
+		allOptions.addOption(query);
+		allOptions.addOption(shipPath);
+		allOptions.addOption(slots);
+		allOptions.addOption(dynamicproperties);
+		allOptions.addOption(detached);
+		allOptions.addOption(streaming);
+		allOptions.addOption(name);
+		allOptions.addOption(applicationId);
+		allOptions.addOption(zookeeperNamespace);
 	}
 
-
 	/**
-	 * Tries to load a Flink Yarn properties file and returns the Yarn application id if successful
+	 * Tries to load a Flink Yarn properties file and returns the Yarn application id if successful.
 	 * @param cmdLine The command-line parameters
 	 * @param flinkConfiguration The flink configuration
 	 * @return Yarn application id or null if none could be retrieved
@@ -184,8 +185,8 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 		}
 
 		for (Option option : cmdLine.getOptions()) {
-			if (ALL_OPTIONS.hasOption(option.getOpt())) {
-				if (!option.getOpt().equals(DETACHED.getOpt())) {
+			if (allOptions.hasOption(option.getOpt())) {
+				if (!option.getOpt().equals(detached.getOpt())) {
 					// don't resume from properties file if yarn options have been specified
 					return null;
 				}
@@ -257,17 +258,17 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 
 		AbstractYarnClusterDescriptor yarnClusterDescriptor = getClusterDescriptor();
 
-		if (!cmd.hasOption(CONTAINER.getOpt())) { // number of containers is required option!
-			LOG.error("Missing required argument {}", CONTAINER.getOpt());
+		if (!cmd.hasOption(container.getOpt())) { // the number of containers is a required option!
+			LOG.error("Missing required argument {}", container.getOpt());
 			printUsage();
-			throw new IllegalArgumentException("Missing required argument " + CONTAINER.getOpt());
+			throw new IllegalArgumentException("Missing required argument " + container.getOpt());
 		}
-		yarnClusterDescriptor.setTaskManagerCount(Integer.valueOf(cmd.getOptionValue(CONTAINER.getOpt())));
+		yarnClusterDescriptor.setTaskManagerCount(Integer.valueOf(cmd.getOptionValue(container.getOpt())));
 
 		// Jar Path
 		Path localJarPath;
-		if (cmd.hasOption(FLINK_JAR.getOpt())) {
-			String userPath = cmd.getOptionValue(FLINK_JAR.getOpt());
+		if (cmd.hasOption(flinkJar.getOpt())) {
+			String userPath = cmd.getOptionValue(flinkJar.getOpt());
 			if (!userPath.startsWith("file://")) {
 				userPath = "file://" + userPath;
 			}
@@ -283,7 +284,7 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 				localJarPath = new Path(new File(decodedPath).toURI());
 			} catch (UnsupportedEncodingException e) {
 				throw new RuntimeException("Couldn't decode the encoded Flink dist jar path: " + encodedJarPath +
-					" Please supply a path manually via the -" + FLINK_JAR.getOpt() + " option.");
+					" Please supply a path manually via the -" + flinkJar.getOpt() + " option.");
 			}
 		}
 
@@ -291,8 +292,8 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 
 		List<File> shipFiles = new ArrayList<>();
 		// path to directory to ship
-		if (cmd.hasOption(SHIP_PATH.getOpt())) {
-			String shipPath = cmd.getOptionValue(SHIP_PATH.getOpt());
+		if (cmd.hasOption(shipPath.getOpt())) {
+			String shipPath = cmd.getOptionValue(this.shipPath.getOpt());
 			File shipDir = new File(shipPath);
 			if (shipDir.isDirectory()) {
 				shipFiles.add(shipDir);
@@ -304,51 +305,51 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 		yarnClusterDescriptor.addShipFiles(shipFiles);
 
 		// queue
-		if (cmd.hasOption(QUEUE.getOpt())) {
-			yarnClusterDescriptor.setQueue(cmd.getOptionValue(QUEUE.getOpt()));
+		if (cmd.hasOption(queue.getOpt())) {
+			yarnClusterDescriptor.setQueue(cmd.getOptionValue(queue.getOpt()));
 		}
 
 		// JobManager Memory
-		if (cmd.hasOption(JM_MEMORY.getOpt())) {
-			int jmMemory = Integer.valueOf(cmd.getOptionValue(JM_MEMORY.getOpt()));
+		if (cmd.hasOption(jmMemory.getOpt())) {
+			int jmMemory = Integer.valueOf(cmd.getOptionValue(this.jmMemory.getOpt()));
 			yarnClusterDescriptor.setJobManagerMemory(jmMemory);
 		}
 
 		// Task Managers memory
-		if (cmd.hasOption(TM_MEMORY.getOpt())) {
-			int tmMemory = Integer.valueOf(cmd.getOptionValue(TM_MEMORY.getOpt()));
+		if (cmd.hasOption(tmMemory.getOpt())) {
+			int tmMemory = Integer.valueOf(cmd.getOptionValue(this.tmMemory.getOpt()));
 			yarnClusterDescriptor.setTaskManagerMemory(tmMemory);
 		}
 
-		if (cmd.hasOption(SLOTS.getOpt())) {
-			int slots = Integer.valueOf(cmd.getOptionValue(SLOTS.getOpt()));
+		if (cmd.hasOption(slots.getOpt())) {
+			int slots = Integer.valueOf(cmd.getOptionValue(this.slots.getOpt()));
 			yarnClusterDescriptor.setTaskManagerSlots(slots);
 		}
 
 		String[] dynamicProperties = null;
-		if (cmd.hasOption(DYNAMIC_PROPERTIES.getOpt())) {
-			dynamicProperties = cmd.getOptionValues(DYNAMIC_PROPERTIES.getOpt());
+		if (cmd.hasOption(dynamicproperties.getOpt())) {
+			dynamicProperties = cmd.getOptionValues(dynamicproperties.getOpt());
 		}
 		String dynamicPropertiesEncoded = StringUtils.join(dynamicProperties, YARN_DYNAMIC_PROPERTIES_SEPARATOR);
 
 		yarnClusterDescriptor.setDynamicPropertiesEncoded(dynamicPropertiesEncoded);
 
-		if (cmd.hasOption(DETACHED.getOpt()) || cmd.hasOption(CliFrontendParser.DETACHED_OPTION.getOpt())) {
+		if (cmd.hasOption(detached.getOpt()) || cmd.hasOption(CliFrontendParser.DETACHED_OPTION.getOpt())) {
 			this.detachedMode = true;
 			yarnClusterDescriptor.setDetachedMode(true);
 		}
 
-		if(cmd.hasOption(NAME.getOpt())) {
-			yarnClusterDescriptor.setName(cmd.getOptionValue(NAME.getOpt()));
+		if (cmd.hasOption(name.getOpt())) {
+			yarnClusterDescriptor.setName(cmd.getOptionValue(name.getOpt()));
 		} else {
 			// set the default application name, if none is specified
-			if(defaultApplicationName != null) {
+			if (defaultApplicationName != null) {
 				yarnClusterDescriptor.setName(defaultApplicationName);
 			}
 		}
 
-		if (cmd.hasOption(ZOOKEEPER_NAMESPACE.getOpt())) {
-			String zookeeperNamespace = cmd.getOptionValue(ZOOKEEPER_NAMESPACE.getOpt());
+		if (cmd.hasOption(zookeeperNamespace.getOpt())) {
+			String zookeeperNamespace = cmd.getOptionValue(this.zookeeperNamespace.getOpt());
 			yarnClusterDescriptor.setZookeeperNamespace(zookeeperNamespace);
 		}
 
@@ -368,7 +369,7 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 			String message = "The YARN cluster has " + maxSlots + " slots available, " +
 				"but the user requested a parallelism of " + userParallelism + " on YARN. " +
 				"Each of the " + yarnClusterDescriptor.getTaskManagerCount() + " TaskManagers " +
-				"will get "+slotsPerTM+" slots.";
+				"will get " + slotsPerTM + " slots.";
 			logAndSysout(message);
 			yarnClusterDescriptor.setTaskManagerSlots(slotsPerTM);
 		}
@@ -383,7 +384,7 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 		formatter.setLeftPadding(5);
 		formatter.setSyntaxPrefix("   Required");
 		Options req = new Options();
-		req.addOption(CONTAINER);
+		req.addOption(container);
 		formatter.printHelp(" ", req);
 
 		formatter.setSyntaxPrefix("   Optional");
@@ -403,7 +404,7 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 	}
 
 	public static void runInteractiveCli(YarnClusterClient yarnCluster, boolean readConsoleInput) {
-		final String HELP = "Available commands:\n" +
+		final String help = "Available commands:\n" +
 				"help - show these commands\n" +
 				"stop - stop the YARN session";
 		int numTaskmanagers = 0;
@@ -443,8 +444,7 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 				// wait until CLIENT_POLLING_INTERVAL is over or the user entered something.
 				long startTime = System.currentTimeMillis();
 				while ((System.currentTimeMillis() - startTime) < CLIENT_POLLING_INTERVALL * 1000
-						&& (!readConsoleInput || !in.ready()))
-				{
+						&& (!readConsoleInput || !in.ready())) {
 					Thread.sleep(200);
 				}
 				//------------- handle interactive command by user. ----------------------
@@ -458,10 +458,10 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 							break label;
 
 						case "help":
-							System.err.println(HELP);
+							System.err.println(help);
 							break;
 						default:
-							System.err.println("Unknown command '" + command + "'. Showing help: \n" + HELP);
+							System.err.println("Unknown command '" + command + "'. Showing help: \n" + help);
 							break;
 					}
 				}
@@ -471,7 +471,7 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 					break;
 				}
 			}
-		} catch(Exception e) {
+		} catch (Exception e) {
 			LOG.warn("Exception while running the interactive command line interface", e);
 		}
 	}
@@ -493,7 +493,7 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 	public boolean isActive(CommandLine commandLine, Configuration configuration) {
 		String jobManagerOption = commandLine.getOptionValue(ADDRESS_OPTION.getOpt(), null);
 		boolean yarnJobManager = ID.equals(jobManagerOption);
-		boolean yarnAppId = commandLine.hasOption(APPLICATION_ID.getOpt());
+		boolean yarnAppId = commandLine.hasOption(applicationId.getOpt());
 		return yarnJobManager || yarnAppId || loadYarnPropertiesFile(commandLine, configuration) != null;
 	}
 
@@ -504,14 +504,14 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 
 	@Override
 	public void addRunOptions(Options baseOptions) {
-		for (Object option : ALL_OPTIONS.getOptions()) {
+		for (Object option : allOptions.getOptions()) {
 			baseOptions.addOption((Option) option);
 		}
 	}
 
 	@Override
 	public void addGeneralOptions(Options baseOptions) {
-		baseOptions.addOption(APPLICATION_ID);
+		baseOptions.addOption(applicationId);
 	}
 
 	@Override
@@ -520,13 +520,13 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 			Configuration config) throws UnsupportedOperationException {
 
 		// first check for an application id, then try to load from yarn properties
-		String applicationID = cmdLine.hasOption(APPLICATION_ID.getOpt()) ?
-				cmdLine.getOptionValue(APPLICATION_ID.getOpt())
+		String applicationID = cmdLine.hasOption(applicationId.getOpt()) ?
+				cmdLine.getOptionValue(applicationId.getOpt())
 				: loadYarnPropertiesFile(cmdLine, config);
 
-		if(null != applicationID) {
-			String zkNamespace = cmdLine.hasOption(ZOOKEEPER_NAMESPACE.getOpt()) ?
-					cmdLine.getOptionValue(ZOOKEEPER_NAMESPACE.getOpt())
+		if (null != applicationID) {
+			String zkNamespace = cmdLine.hasOption(zookeeperNamespace.getOpt()) ?
+					cmdLine.getOptionValue(zookeeperNamespace.getOpt())
 					: config.getString(HighAvailabilityOptions.HA_CLUSTER_ID, applicationID);
 			config.setString(HighAvailabilityOptions.HA_CLUSTER_ID, zkNamespace);
 
@@ -570,39 +570,39 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 		CommandLine cmd;
 		try {
 			cmd = parser.parse(options, args);
-		} catch(Exception e) {
+		} catch (Exception e) {
 			System.out.println(e.getMessage());
 			printUsage();
 			return 1;
 		}
 
 		// Query cluster for metrics
-		if (cmd.hasOption(QUERY.getOpt())) {
+		if (cmd.hasOption(query.getOpt())) {
 			AbstractYarnClusterDescriptor yarnDescriptor = getClusterDescriptor();
 			String description;
 			try {
 				description = yarnDescriptor.getClusterDescription();
 			} catch (Exception e) {
-				System.err.println("Error while querying the YARN cluster for available resources: "+e.getMessage());
+				System.err.println("Error while querying the YARN cluster for available resources: " + e.getMessage());
 				e.printStackTrace(System.err);
 				return 1;
 			}
 			System.out.println(description);
 			return 0;
-		} else if (cmd.hasOption(APPLICATION_ID.getOpt())) {
+		} else if (cmd.hasOption(applicationId.getOpt())) {
 
 			AbstractYarnClusterDescriptor yarnDescriptor = getClusterDescriptor();
 
 			//configure ZK namespace depending on the value passed
-			String zkNamespace = cmd.hasOption(ZOOKEEPER_NAMESPACE.getOpt()) ?
-									cmd.getOptionValue(ZOOKEEPER_NAMESPACE.getOpt())
-									:yarnDescriptor.getFlinkConfiguration()
-									.getString(HA_ZOOKEEPER_NAMESPACE_KEY, cmd.getOptionValue(APPLICATION_ID.getOpt()));
+			String zkNamespace = cmd.hasOption(zookeeperNamespace.getOpt()) ?
+									cmd.getOptionValue(zookeeperNamespace.getOpt())
+									: yarnDescriptor.getFlinkConfiguration()
+									.getString(HA_ZOOKEEPER_NAMESPACE_KEY, cmd.getOptionValue(applicationId.getOpt()));
 			LOG.info("Going to use the ZK namespace: {}", zkNamespace);
 			yarnDescriptor.getFlinkConfiguration().setString(HA_ZOOKEEPER_NAMESPACE_KEY, zkNamespace);
 
 			try {
-				yarnCluster = yarnDescriptor.retrieve(cmd.getOptionValue(APPLICATION_ID.getOpt()));
+				yarnCluster = yarnDescriptor.retrieve(cmd.getOptionValue(applicationId.getOpt()));
 			} catch (Exception e) {
 				throw new RuntimeException("Could not retrieve existing Yarn application", e);
 			}
@@ -610,7 +610,7 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 			if (detachedMode) {
 				LOG.info("The Flink YARN client has been started in detached mode. In order to stop " +
 					"Flink on YARN, use the following command or a YARN web interface to stop it:\n" +
-					"yarn application -kill " + APPLICATION_ID.getOpt());
+					"yarn application -kill " + applicationId.getOpt());
 				yarnCluster.disconnect();
 			} else {
 				runInteractiveCli(yarnCluster, true);
@@ -629,7 +629,7 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 			try {
 				yarnCluster = yarnDescriptor.deploy();
 			} catch (Exception e) {
-				System.err.println("Error while deploying YARN cluster: "+e.getMessage());
+				System.err.println("Error while deploying YARN cluster: " + e.getMessage());
 				e.printStackTrace(System.err);
 				return 1;
 			}
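
The option handling throughout this class follows the stock Commons CLI pattern:
build Option instances (short name, long name, whether the option takes an
argument, description), register them on an Options collection, parse, then query
via hasOption()/getOptionValue(). A self-contained sketch of that pattern, using
one illustrative option:

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.Options;
    import org.apache.commons.cli.PosixParser;

    public class CliSketch {
        public static void main(String[] args) throws Exception {
            // Option(shortName, longName, hasArg, description)
            Option jmMemory = new Option("jm", "jobManagerMemory", true,
                "Memory for JobManager Container [in MB]");
            Options allOptions = new Options();
            allOptions.addOption(jmMemory);

            CommandLine cmd = new PosixParser().parse(allOptions, args);
            if (cmd.hasOption(jmMemory.getOpt())) {
                int memory = Integer.valueOf(cmd.getOptionValue(jmMemory.getOpt()));
                System.out.println("JobManager memory: " + memory + " MB");
            }
        }
    }

Run with, e.g., "-jm 1024" to see the parsed value echoed back.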

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/configuration/YarnConfigOptions.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/configuration/YarnConfigOptions.java b/flink-yarn/src/main/java/org/apache/flink/yarn/configuration/YarnConfigOptions.java
index 8839c1e..28ef2ab 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/configuration/YarnConfigOptions.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/configuration/YarnConfigOptions.java
@@ -24,7 +24,8 @@ import static org.apache.flink.configuration.ConfigOptions.key;
 
 /**
  * This class holds configuration constants used by Flink's YARN runners.
- * These options are not expected to be ever configured by users explicitly. 
+ *
+ * <p>These options are not expected to be ever configured by users explicitly.
  */
 public class YarnConfigOptions {
 
@@ -50,11 +51,11 @@ public class YarnConfigOptions {
 	public static final ConfigOption<String> CLASSPATH_INCLUDE_USER_JAR =
 		key("yarn.per-job-cluster.include-user-jar")
 			.defaultValue("ORDER");
-	
+
 
 	// ------------------------------------------------------------------------
 
-	/** This class is not meant to be instantiated */
+	/** This class is not meant to be instantiated. */
 	private YarnConfigOptions() {}
 
 	/** @see YarnConfigOptions#CLASSPATH_INCLUDE_USER_JAR */
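
The CLASSPATH_INCLUDE_USER_JAR option above uses Flink's ConfigOptions builder:
key(...) names the configuration key and defaultValue(...) supplies the fallback.
A minimal sketch of defining and reading such an option follows; the option is the
one from the diff, while the surrounding class and main method are illustrative:

    import org.apache.flink.configuration.ConfigOption;
    import org.apache.flink.configuration.Configuration;

    import static org.apache.flink.configuration.ConfigOptions.key;

    public class ConfigOptionSketch {
        public static final ConfigOption<String> CLASSPATH_INCLUDE_USER_JAR =
            key("yarn.per-job-cluster.include-user-jar")
                .defaultValue("ORDER");

        public static void main(String[] args) {
            Configuration config = new Configuration();
            // falls back to the declared default while the key is unset
            System.out.println(config.getString(CLASSPATH_INCLUDE_USER_JAR));
        }
    }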

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/highavailability/YarnHighAvailabilityServices.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/highavailability/YarnHighAvailabilityServices.java b/flink-yarn/src/main/java/org/apache/flink/yarn/highavailability/YarnHighAvailabilityServices.java
index f81d040..d94921e 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/highavailability/YarnHighAvailabilityServices.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/highavailability/YarnHighAvailabilityServices.java
@@ -39,20 +39,20 @@ import java.io.IOException;
 import java.net.URI;
 import java.util.concurrent.locks.ReentrantLock;
 
-import static org.apache.flink.util.Preconditions.checkNotNull;
 import static org.apache.flink.util.ExceptionUtils.firstOrSuppressed;
+import static org.apache.flink.util.Preconditions.checkNotNull;
 import static org.apache.flink.util.Preconditions.checkState;
 
 /**
  * The basis of {@link HighAvailabilityServices} for YARN setups.
  * These high-availability services auto-configure YARN's HDFS and the YARN application's
  * working directory to be used to store job recovery data.
- * 
+ *
  * <p>Note for implementers: This class locks access to and creation of services,
  * to make sure all services are properly shut down when shutting down this class.
  * To participate in the checks, overriding methods should frame method body with
  * calls to {@code enter()} and {@code exit()} as shown in the following pattern:
- * 
+ *
  * <pre>{@code
  * public LeaderRetrievalService getResourceManagerLeaderRetriever() {
  *     enter();
@@ -67,21 +67,21 @@ import static org.apache.flink.util.Preconditions.checkState;
  */
 public abstract class YarnHighAvailabilityServices implements HighAvailabilityServices {
 
-	/** The name of the sub directory in which Flink stores the recovery data */
+	/** The name of the sub directory in which Flink stores the recovery data. */
 	public static final String FLINK_RECOVERY_DATA_DIR = "flink_recovery_data";
 
-	/** Logger for these services, shared with subclasses */
+	/** Logger for these services, shared with subclasses. */
 	protected static final Logger LOG = LoggerFactory.getLogger(YarnHighAvailabilityServices.class);
 
 	// ------------------------------------------------------------------------
 
-	/** The lock that guards all accesses to methods in this class */
+	/** The lock that guards all accesses to methods in this class. */
 	private final ReentrantLock lock;
 
-	/** The Flink FileSystem object that represent the HDFS used by YARN */
+	/** The Flink FileSystem object that represents the HDFS used by YARN. */
 	protected final FileSystem flinkFileSystem;
 
-	/** The Hadoop FileSystem object that represent the HDFS used by YARN */
+	/** The Hadoop FileSystem object that represents the HDFS used by YARN. */
 	protected final org.apache.hadoop.fs.FileSystem hadoopFileSystem;
 
 	/** The working directory of this YARN application.
@@ -89,13 +89,13 @@ public abstract class YarnHighAvailabilityServices implements HighAvailabilitySe
 	protected final Path workingDirectory;
 
 	/** The directory for HA persistent data. This should be deleted when the
-	 * HA services clean up */
+	 * HA services clean up. */
 	protected final Path haDataDirectory;
 
-	/** Blob store service to be used for the BlobServer and BlobCache */
+	/** Blob store service to be used for the BlobServer and BlobCache. */
 	protected final BlobStoreService blobStoreService;
 
-	/** Flag marking this instance as shut down */
+	/** Flag marking this instance as shut down. */
 	private volatile boolean closed;
 
 	// ------------------------------------------------------------------------
@@ -103,13 +103,13 @@ public abstract class YarnHighAvailabilityServices implements HighAvailabilitySe
 	/**
 	 * Creates new YARN high-availability services, configuring the file system and recovery
 	 * data directory based on the working directory in the given Hadoop configuration.
-	 * 
+	 *
 	 * <p>This class requires that the default Hadoop file system configured in the given
 	 * Hadoop configuration is an HDFS.
-	 * 
+	 *
 	 * @param config     The Flink configuration of this component / process.
 	 * @param hadoopConf The Hadoop configuration for the YARN cluster.
-	 * 
+	 *
 	 * @throws IOException Thrown, if the initialization of the Hadoop file system used by YARN fails.
 	 */
 	protected YarnHighAvailabilityServices(
@@ -280,7 +280,7 @@ public abstract class YarnHighAvailabilityServices implements HighAvailabilitySe
 	/**
 	 * Acquires the lock and checks whether the services are already closed. If they are
 	 * already closed, the method releases the lock and returns {@code false}.
-	 * 
+	 *
 	 * @return True, if the lock was acquired and the services are not closed, false if the services are closed.
 	 */
 	boolean enterUnlessClosed() {
@@ -307,12 +307,12 @@ public abstract class YarnHighAvailabilityServices implements HighAvailabilitySe
 	/**
 	 * Creates the high-availability services for a single-job Flink YARN application, to be
 	 * used in the Application Master that runs both ResourceManager and JobManager.
-	 * 
+	 *
 	 * @param flinkConfig  The Flink configuration.
 	 * @param hadoopConfig The Hadoop configuration for the YARN cluster.
-	 * 
+	 *
 	 * @return The created high-availability services.
-	 * 
+	 *
 	 * @throws IOException Thrown, if the high-availability services could not be initialized.
 	 */
 	public static YarnHighAvailabilityServices forSingleJobAppMaster(
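
The enter()/exit() discipline described in the class javadoc above can be pictured
as a ReentrantLock plus a closed flag. The sketch below is an illustration of that
pattern, not the actual Flink implementation:

    import java.util.concurrent.locks.ReentrantLock;

    class GuardedServicesSketch {
        private final ReentrantLock lock = new ReentrantLock();
        private volatile boolean closed;

        void enter() {
            lock.lock();
            if (closed) {
                lock.unlock();
                throw new IllegalStateException("services are shut down");
            }
        }

        void exit() {
            lock.unlock();
        }

        Object getSomeService() {
            enter();
            try {
                return new Object(); // create or hand out the guarded service
            } finally {
                exit();
            }
        }
    }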

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/highavailability/YarnIntraNonHaMasterServices.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/highavailability/YarnIntraNonHaMasterServices.java b/flink-yarn/src/main/java/org/apache/flink/yarn/highavailability/YarnIntraNonHaMasterServices.java
index abfdb5c..accf8d5 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/highavailability/YarnIntraNonHaMasterServices.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/highavailability/YarnIntraNonHaMasterServices.java
@@ -34,7 +34,7 @@ import java.util.concurrent.Executors;
 /**
  * These YarnHighAvailabilityServices are for the Application Master in setups where there is one
  * ResourceManager that is statically configured in the Flink configuration.
- * 
+ *
  * <h3>Handled failure types</h3>
  * <ul>
  *     <li><b>User code & operator failures:</b> Failed operators are recovered from checkpoints.</li>
@@ -51,11 +51,11 @@ import java.util.concurrent.Executors;
  * <p>Internally, these services put their recovery data into YARN's working directory,
  * except for checkpoints, which are in the configured checkpoint directory. That way,
  * checkpoints can be resumed with a new job/application, even if the complete YARN application
- * is killed and cleaned up. 
+ * is killed and cleaned up.
  *
  * <p>Because ResourceManager and JobManager run both in the same process (Application Master), they
  * use an embedded leader election service to find each other.
- * 
+ *
  * <p>A typical YARN setup that uses these HA services first starts the ResourceManager
  * inside the ApplicationMaster and puts its RPC endpoint address into the configuration with which
  * the TaskManagers are started. Because of this static addressing scheme, the setup cannot handle failures
@@ -65,21 +65,21 @@ import java.util.concurrent.Executors;
  */
 public class YarnIntraNonHaMasterServices extends AbstractYarnNonHaServices {
 
-	/** The dispatcher thread pool for these services */
+	/** The dispatcher thread pool for these services. */
 	private final ExecutorService dispatcher;
 
-	/** The embedded leader election service used by JobManagers to find the resource manager */
+	/** The embedded leader election service used by JobManagers to find the resource manager. */
 	private final SingleLeaderElectionService resourceManagerLeaderElectionService;
 
 	// ------------------------------------------------------------------------
 
 	/**
 	 * Creates new YarnIntraNonHaMasterServices for the given Flink and YARN configuration.
-	 * 
-	 * This constructor initializes access to the HDFS to store recovery data, and creates the
+	 *
+	 * <p>This constructor initializes access to the HDFS to store recovery data, and creates the
 	 * embedded leader election services through which ResourceManager and JobManager find and
 	 * confirm each other.
-	 * 
+	 *
 	 * @param config     The Flink configuration of this component / process.
 	 * @param hadoopConf The Hadoop configuration for the YARN cluster.
 	 *

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/highavailability/YarnPreConfiguredMasterNonHaServices.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/highavailability/YarnPreConfiguredMasterNonHaServices.java b/flink-yarn/src/main/java/org/apache/flink/yarn/highavailability/YarnPreConfiguredMasterNonHaServices.java
index 9d05bbe..ae8f05b 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/highavailability/YarnPreConfiguredMasterNonHaServices.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/highavailability/YarnPreConfiguredMasterNonHaServices.java
@@ -35,7 +35,7 @@ import java.io.IOException;
 /**
  * These YarnHighAvailabilityServices are for use by the TaskManager in setups,
  * where there is one ResourceManager that is statically configured in the Flink configuration.
- * 
+ *
  * <h3>Handled failure types</h3>
  * <ul>
  *     <li><b>User code & operator failures:</b> Failed operators are recovered from checkpoints.</li>
@@ -52,7 +52,7 @@ import java.io.IOException;
  * <p>Internally, these services put their recovery data into YARN's working directory,
  * except for checkpoints, which are in the configured checkpoint directory. That way,
  * checkpoints can be resumed with a new job/application, even if the complete YARN application
- * is killed and cleaned up. 
+ * is killed and cleaned up.
  *
  * <p>A typical YARN setup that uses these HA services first starts the ResourceManager
  * inside the ApplicationMaster and puts its RPC endpoint address into the configuration with which
@@ -63,7 +63,7 @@ import java.io.IOException;
  */
 public class YarnPreConfiguredMasterNonHaServices extends AbstractYarnNonHaServices {
 
-	/** The RPC URL under which the single ResourceManager can be reached while available */ 
+	/** The RPC URL under which the single ResourceManager can be reached while available. */
 	private final String resourceManagerRpcUrl;
 
 	// ------------------------------------------------------------------------
@@ -72,7 +72,7 @@ public class YarnPreConfiguredMasterNonHaServices extends AbstractYarnNonHaServi
 	 * Creates new YarnPreConfiguredMasterHaServices for the given Flink and YARN configuration.
 	 * This constructor parses the ResourceManager address from the Flink configuration and sets
 	 * up the HDFS access to store recovery data in the YARN application's working directory.
-	 * 
+	 *
 	 * @param config     The Flink configuration of this component / process.
 	 * @param hadoopConf The Hadoop configuration for the YARN cluster.
 	 *
@@ -97,7 +97,7 @@ public class YarnPreConfiguredMasterNonHaServices extends AbstractYarnNonHaServi
 			final int rmPort = config.getInteger(YarnConfigOptions.APP_MASTER_RPC_PORT);
 
 			if (rmHost == null) {
-				throw new IllegalConfigurationException("Config parameter '" + 
+				throw new IllegalConfigurationException("Config parameter '" +
 						YarnConfigOptions.APP_MASTER_RPC_ADDRESS.key() + "' is missing.");
 			}
 			if (rmPort < 0) {
@@ -105,7 +105,7 @@ public class YarnPreConfiguredMasterNonHaServices extends AbstractYarnNonHaServi
 						YarnConfigOptions.APP_MASTER_RPC_PORT.key() + "' is missing.");
 			}
 			if (rmPort <= 0 || rmPort >= 65536) {
-				throw new IllegalConfigurationException("Invalid value for '" + 
+				throw new IllegalConfigurationException("Invalid value for '" +
 						YarnConfigOptions.APP_MASTER_RPC_PORT.key() + "' - port must be in [1, 65535]");
 			}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/messages/ContainersAllocated.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/messages/ContainersAllocated.java b/flink-yarn/src/main/java/org/apache/flink/yarn/messages/ContainersAllocated.java
index 2648e44..f11063f 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/messages/ContainersAllocated.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/messages/ContainersAllocated.java
@@ -19,6 +19,7 @@
 package org.apache.flink.yarn.messages;
 
 import org.apache.flink.yarn.YarnFlinkResourceManager;
+
 import org.apache.hadoop.yarn.api.records.Container;
 
 import java.util.List;
@@ -26,17 +27,17 @@ import java.util.List;
 /**
  * Message sent by the callback handler to the {@link YarnFlinkResourceManager}
  * to notify it that a set of new containers is available.
- * 
- * NOTE: This message is not serializable, because the Container object is not serializable.
+ *
+ * <p>NOTE: This message is not serializable, because the Container object is not serializable.
  */
 public class ContainersAllocated {
-	
+
 	private final List<Container> containers;
-	
+
 	public ContainersAllocated(List<Container> containers) {
 		this.containers = containers;
 	}
-	
+
 	public List<Container> containers() {
 		return containers;
 	}

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/messages/ContainersComplete.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/messages/ContainersComplete.java b/flink-yarn/src/main/java/org/apache/flink/yarn/messages/ContainersComplete.java
index 65bafbc..5b43835 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/messages/ContainersComplete.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/messages/ContainersComplete.java
@@ -18,8 +18,8 @@
 
 package org.apache.flink.yarn.messages;
 
-
 import org.apache.flink.yarn.YarnFlinkResourceManager;
+
 import org.apache.hadoop.yarn.api.records.ContainerStatus;
 
 import java.util.List;
@@ -27,17 +27,17 @@ import java.util.List;
 /**
  * Message sent by the callback handler to the {@link YarnFlinkResourceManager}
  * to notify it that a set of new containers is complete.
- * 
- * NOTE: This message is not serializable, because the ContainerStatus object is not serializable.
+ *
+ * <p>NOTE: This message is not serializable, because the ContainerStatus object is not serializable.
  */
 public class ContainersComplete {
-	
+
 	private final List<ContainerStatus> containers;
-	
+
 	public ContainersComplete(List<ContainerStatus> containers) {
 		this.containers = containers;
 	}
-	
+
 	public List<ContainerStatus> containers() {
 		return containers;
 	}

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/resources/log4j.properties b/flink-yarn/src/main/resources/log4j.properties
index 749796f..b2ad0d3 100644
--- a/flink-yarn/src/main/resources/log4j.properties
+++ b/flink-yarn/src/main/resources/log4j.properties
@@ -16,7 +16,6 @@
 # limitations under the License.
 ################################################################################
 
-
 # Convenience file for local debugging of the JobManager/TaskManager.
 log4j.rootLogger=INFO, console
 log4j.appender.console=org.apache.log4j.ConsoleAppender

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/scala/org/apache/flink/yarn/ApplicationClient.scala
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/scala/org/apache/flink/yarn/ApplicationClient.scala b/flink-yarn/src/main/scala/org/apache/flink/yarn/ApplicationClient.scala
index 35d5f56..9ac96a3 100644
--- a/flink-yarn/src/main/scala/org/apache/flink/yarn/ApplicationClient.scala
+++ b/flink-yarn/src/main/scala/org/apache/flink/yarn/ApplicationClient.scala
@@ -25,11 +25,11 @@ import grizzled.slf4j.Logger
 import org.apache.flink.configuration.Configuration
 import org.apache.flink.runtime.clusterframework.messages._
 import org.apache.flink.runtime.leaderretrieval.{LeaderRetrievalListener, LeaderRetrievalService}
-import org.apache.flink.runtime.{LeaderSessionMessageFilter, FlinkActor, LogMessages}
+import org.apache.flink.runtime.{FlinkActor, LeaderSessionMessageFilter, LogMessages}
 import org.apache.flink.yarn.YarnMessages._
+
 import scala.collection.mutable
 import scala.concurrent.duration._
-
 import scala.language.postfixOps
 
 /** Actor which is responsible to repeatedly poll the Yarn cluster status from the ResourceManager.
@@ -187,7 +187,7 @@ class ApplicationClient(
 
     // locally forward messages
     case LocalGetYarnMessage =>
-      if(messagesQueue.nonEmpty) {
+      if (messagesQueue.nonEmpty) {
         sender() ! decorateMessage(Option(messagesQueue.dequeue()))
       } else {
         sender() ! decorateMessage(None)

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/scala/org/apache/flink/yarn/YarnJobManager.scala
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/scala/org/apache/flink/yarn/YarnJobManager.scala b/flink-yarn/src/main/scala/org/apache/flink/yarn/YarnJobManager.scala
index e094bb7..d78b390 100644
--- a/flink-yarn/src/main/scala/org/apache/flink/yarn/YarnJobManager.scala
+++ b/flink-yarn/src/main/scala/org/apache/flink/yarn/YarnJobManager.scala
@@ -38,7 +38,6 @@ import org.apache.flink.runtime.metrics.MetricRegistry
 import scala.concurrent.duration._
 import scala.language.postfixOps
 
-
 /** JobManager actor for execution on Yarn. It enriches the [[JobManager]] with additional messages
   * to start/administer/stop the Yarn session.
   *

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/scala/org/apache/flink/yarn/YarnMessages.scala
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/scala/org/apache/flink/yarn/YarnMessages.scala b/flink-yarn/src/main/scala/org/apache/flink/yarn/YarnMessages.scala
index ada2631..1636e09 100644
--- a/flink-yarn/src/main/scala/org/apache/flink/yarn/YarnMessages.scala
+++ b/flink-yarn/src/main/scala/org/apache/flink/yarn/YarnMessages.scala
@@ -18,13 +18,12 @@
 
 package org.apache.flink.yarn
 
-import java.util.{Date, UUID, List => JavaList}
+import java.util.{UUID, List => JavaList}
 
-import org.apache.flink.api.common.JobID
 import org.apache.flink.runtime.clusterframework.ApplicationStatus
 import org.apache.flink.runtime.messages.RequiresLeaderSessionID
 import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.yarn.api.records.{Container, ContainerStatus, FinalApplicationStatus}
+import org.apache.hadoop.yarn.api.records.{Container, ContainerStatus}
 
 import scala.concurrent.duration.{Deadline, FiniteDuration}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/scala/org/apache/flink/yarn/YarnTaskManager.scala
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/scala/org/apache/flink/yarn/YarnTaskManager.scala b/flink-yarn/src/main/scala/org/apache/flink/yarn/YarnTaskManager.scala
index b7f4c9a..e37ff6f 100644
--- a/flink-yarn/src/main/scala/org/apache/flink/yarn/YarnTaskManager.scala
+++ b/flink-yarn/src/main/scala/org/apache/flink/yarn/YarnTaskManager.scala
@@ -23,9 +23,9 @@ import org.apache.flink.runtime.highavailability.HighAvailabilityServices
 import org.apache.flink.runtime.io.disk.iomanager.IOManager
 import org.apache.flink.runtime.io.network.NetworkEnvironment
 import org.apache.flink.runtime.memory.MemoryManager
-import org.apache.flink.runtime.taskmanager.{TaskManager, TaskManagerLocation}
 import org.apache.flink.runtime.metrics.MetricRegistry
 import org.apache.flink.runtime.taskexecutor.TaskManagerConfiguration
+import org.apache.flink.runtime.taskmanager.{TaskManager, TaskManagerLocation}
 
 /** An extension of the TaskManager that listens for additional YARN related
   * messages.

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/test/java/org/apache/flink/yarn/TestingYarnFlinkResourceManager.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/test/java/org/apache/flink/yarn/TestingYarnFlinkResourceManager.java b/flink-yarn/src/test/java/org/apache/flink/yarn/TestingYarnFlinkResourceManager.java
index f03c604..d283c3b 100644
--- a/flink-yarn/src/test/java/org/apache/flink/yarn/TestingYarnFlinkResourceManager.java
+++ b/flink-yarn/src/test/java/org/apache/flink/yarn/TestingYarnFlinkResourceManager.java
@@ -18,7 +18,6 @@
 
 package org.apache.flink.yarn;
 
-import akka.actor.ActorRef;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.runtime.clusterframework.ContaineredTaskManagerParameters;
@@ -26,6 +25,8 @@ import org.apache.flink.runtime.clusterframework.messages.NotifyResourceStarted;
 import org.apache.flink.runtime.leaderretrieval.LeaderRetrievalService;
 import org.apache.flink.yarn.messages.NotifyWhenResourcesRegistered;
 import org.apache.flink.yarn.messages.RequestNumberOfRegisteredResources;
+
+import akka.actor.ActorRef;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
 import org.apache.hadoop.yarn.client.api.AMRMClient;
 import org.apache.hadoop.yarn.client.api.NMClient;
@@ -35,6 +36,9 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import java.util.Comparator;
 import java.util.PriorityQueue;
 
+/**
+ * A test extension to the {@link YarnFlinkResourceManager} that can handle additional test messages.
+ */
 public class TestingYarnFlinkResourceManager extends YarnFlinkResourceManager {
 
 	private final PriorityQueue<Tuple2<Integer, ActorRef>> waitingQueue = new PriorityQueue<>(32, new Comparator<Tuple2<Integer, ActorRef>>() {

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/test/java/org/apache/flink/yarn/UtilsTest.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/test/java/org/apache/flink/yarn/UtilsTest.java b/flink-yarn/src/test/java/org/apache/flink/yarn/UtilsTest.java
index a09c5b2..a5ec176 100644
--- a/flink-yarn/src/test/java/org/apache/flink/yarn/UtilsTest.java
+++ b/flink-yarn/src/test/java/org/apache/flink/yarn/UtilsTest.java
@@ -18,11 +18,6 @@
 
 package org.apache.flink.yarn;
 
-import akka.actor.ActorRef;
-import akka.actor.ActorSystem;
-import akka.actor.PoisonPill;
-import akka.actor.Props;
-import akka.testkit.JavaTestKit;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.runtime.akka.AkkaUtils;
 import org.apache.flink.runtime.clusterframework.ContaineredTaskManagerParameters;
@@ -36,6 +31,12 @@ import org.apache.flink.runtime.testingUtils.TestingUtils;
 import org.apache.flink.util.TestLogger;
 import org.apache.flink.yarn.messages.NotifyWhenResourcesRegistered;
 import org.apache.flink.yarn.messages.RequestNumberOfRegisteredResources;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.PoisonPill;
+import akka.actor.Props;
+import akka.testkit.JavaTestKit;
 import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
@@ -53,11 +54,6 @@ import org.junit.Test;
 import org.mockito.Matchers;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
-import scala.Option;
-import scala.concurrent.Await;
-import scala.concurrent.Future;
-import scala.concurrent.duration.Deadline;
-import scala.concurrent.duration.FiniteDuration;
 
 import java.util.ArrayList;
 import java.util.Collections;
@@ -66,11 +62,20 @@ import java.util.List;
 import java.util.UUID;
 import java.util.concurrent.TimeUnit;
 
+import scala.Option;
+import scala.concurrent.Await;
+import scala.concurrent.Future;
+import scala.concurrent.duration.Deadline;
+import scala.concurrent.duration.FiniteDuration;
+
 import static org.junit.Assert.assertEquals;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
+/**
+ * Tests for {@link Utils}.
+ */
 public class UtilsTest extends TestLogger {
 
 	private static ActorSystem system;
@@ -99,7 +104,7 @@ public class UtilsTest extends TestLogger {
 			String applicationMasterHostName = "localhost";
 			String webInterfaceURL = "foobar";
 			ContaineredTaskManagerParameters taskManagerParameters = new ContaineredTaskManagerParameters(
-				1l, 1l, 1l, 1, new HashMap<String, String>());
+				1L, 1L, 1L, 1, new HashMap<String, String>());
 			ContainerLaunchContext taskManagerLaunchContext = mock(ContainerLaunchContext.class);
 			int yarnHeartbeatIntervalMillis = 1000;
 			int maxFailedContainers = 10;
@@ -203,7 +208,7 @@ public class UtilsTest extends TestLogger {
 					expectMsgClass(deadline.timeLeft(), Acknowledge.class);
 				}
 
-				Future<Object> numberOfRegisteredResourcesFuture = resourceManagerGateway.ask(RequestNumberOfRegisteredResources.Instance, deadline.timeLeft());
+				Future<Object> numberOfRegisteredResourcesFuture = resourceManagerGateway.ask(RequestNumberOfRegisteredResources.INSTANCE, deadline.timeLeft());
 
 				int numberOfRegisteredResources = (Integer) Await.result(numberOfRegisteredResourcesFuture, deadline.timeLeft());
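
One detail worth calling out in this hunk set: the change from 1l to 1L a few lines
above. The strict checkstyle profile flags lowercase long suffixes (the UpperEll
rule), since a lowercase "l" is easily misread as the digit 1. A one-line
illustration:

    public class LongLiteralSketch {
        public static void main(String[] args) {
            long a = 1L; // uppercase suffix, unambiguous (1l would be flagged)
            System.out.println(a == 1L); // prints true; only notation changed
        }
    }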
 

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/test/java/org/apache/flink/yarn/YarnApplicationMasterRunnerTest.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/test/java/org/apache/flink/yarn/YarnApplicationMasterRunnerTest.java b/flink-yarn/src/test/java/org/apache/flink/yarn/YarnApplicationMasterRunnerTest.java
index 4884dd0..19a0352 100644
--- a/flink-yarn/src/test/java/org/apache/flink/yarn/YarnApplicationMasterRunnerTest.java
+++ b/flink-yarn/src/test/java/org/apache/flink/yarn/YarnApplicationMasterRunnerTest.java
@@ -18,10 +18,11 @@
 
 package org.apache.flink.yarn;
 
-import com.google.common.collect.ImmutableMap;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.runtime.clusterframework.ContaineredTaskManagerParameters;
 import org.apache.flink.util.OperatingSystem;
+
+import com.google.common.collect.ImmutableMap;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.junit.Assume;
@@ -38,7 +39,12 @@ import org.slf4j.LoggerFactory;
 import java.io.File;
 import java.util.Map;
 
-import static org.apache.flink.yarn.YarnConfigKeys.*;
+import static org.apache.flink.yarn.YarnConfigKeys.ENV_APP_ID;
+import static org.apache.flink.yarn.YarnConfigKeys.ENV_CLIENT_HOME_DIR;
+import static org.apache.flink.yarn.YarnConfigKeys.ENV_CLIENT_SHIP_FILES;
+import static org.apache.flink.yarn.YarnConfigKeys.ENV_FLINK_CLASSPATH;
+import static org.apache.flink.yarn.YarnConfigKeys.ENV_HADOOP_USER_NAME;
+import static org.apache.flink.yarn.YarnConfigKeys.FLINK_JAR_PATH;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.mockito.Matchers.anyInt;
@@ -46,6 +52,9 @@ import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
 
+/**
+ * Tests for the {@link YarnApplicationMasterRunner}.
+ */
 public class YarnApplicationMasterRunnerTest {
 	private static final Logger LOG = LoggerFactory.getLogger(YarnApplicationMasterRunnerTest.class);
 
@@ -81,7 +90,7 @@ public class YarnApplicationMasterRunnerTest {
 			}
 		}).when(yarnConf).getStrings(anyString(), Mockito.<String> anyVararg());
 
-		Map<String, String> env = ImmutableMap. <String, String> builder()
+		Map<String, String> env = ImmutableMap.<String, String> builder()
 			.put(ENV_APP_ID, "foo")
 			.put(ENV_CLIENT_HOME_DIR, home.getAbsolutePath())
 			.put(ENV_CLIENT_SHIP_FILES, "")

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/test/java/org/apache/flink/yarn/YarnClusterDescriptorTest.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/test/java/org/apache/flink/yarn/YarnClusterDescriptorTest.java b/flink-yarn/src/test/java/org/apache/flink/yarn/YarnClusterDescriptorTest.java
index a7204da..9326723 100644
--- a/flink-yarn/src/test/java/org/apache/flink/yarn/YarnClusterDescriptorTest.java
+++ b/flink-yarn/src/test/java/org/apache/flink/yarn/YarnClusterDescriptorTest.java
@@ -15,6 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.flink.yarn;
 
 import org.apache.flink.configuration.ConfigConstants;
@@ -22,6 +23,7 @@ import org.apache.flink.configuration.Configuration;
 import org.apache.flink.configuration.CoreOptions;
 import org.apache.flink.configuration.IllegalConfigurationException;
 import org.apache.flink.yarn.cli.FlinkYarnSessionCli;
+
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
 import org.junit.Before;
@@ -32,9 +34,12 @@ import org.junit.rules.TemporaryFolder;
 import java.io.File;
 import java.io.IOException;
 
-import static org.junit.Assert.fail;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
+/**
+ * Tests for the {@link YarnClusterDescriptor}.
+ */
 public class YarnClusterDescriptorTest {
 
 	@Rule
@@ -140,7 +145,7 @@ public class YarnClusterDescriptorTest {
 
 		assertEquals(
 			java + " " + jvmmem +
-				" " + " " + krb5 +// jvmOpts
+				" " + " " + krb5 + // jvmOpts
 				" " + // logging
 				" " + mainClass + " " + args + " " + redirects,
 			clusterDescriptor
@@ -159,7 +164,7 @@ public class YarnClusterDescriptorTest {
 
 		assertEquals(
 			java + " " + jvmmem +
-				" " + " " + krb5 +// jvmOpts
+				" " + " " + krb5 + // jvmOpts
 				" " + logfile + " " + logback +
 				" " + mainClass + " " + args + " " + redirects,
 			clusterDescriptor
@@ -178,7 +183,7 @@ public class YarnClusterDescriptorTest {
 
 		assertEquals(
 			java + " " + jvmmem +
-				" " + " " + krb5 +// jvmOpts
+				" " + " " + krb5 + // jvmOpts
 				" " + logfile + " " + log4j +
 				" " + mainClass + " " + args + " " + redirects,
 			clusterDescriptor
@@ -197,7 +202,7 @@ public class YarnClusterDescriptorTest {
 
 		assertEquals(
 			java + " " + jvmmem +
-				" " + " " + krb5 +// jvmOpts
+				" " + " " + krb5 + // jvmOpts
 				" " + logfile + " " + logback + " " + log4j +
 				" " + mainClass + " " + args + " " + redirects,
 			clusterDescriptor
@@ -210,16 +215,16 @@ public class YarnClusterDescriptorTest {
 			java + " " + jvmmem +
 				" " + jvmOpts +
 				" " + logfile + " " + logback + " " + log4j +
-				" " + mainClass + " "  + args + " "+ redirects,
+				" " + mainClass + " "  + args + " " + redirects,
 			clusterDescriptor
 				.setupApplicationMasterContainer(true, true, false)
 				.getCommands().get(0));
 
 		assertEquals(
 			java + " " + jvmmem +
-				" " + jvmOpts + " " + krb5 +// jvmOpts
+				" " + jvmOpts + " " + krb5 + // jvmOpts
 				" " + logfile + " " + logback + " " + log4j +
-				" " + mainClass + " "  + args + " "+ redirects,
+				" " + mainClass + " "  + args + " " + redirects,
 			clusterDescriptor
 				.setupApplicationMasterContainer(true, true, true)
 				.getCommands().get(0));
@@ -230,16 +235,16 @@ public class YarnClusterDescriptorTest {
 			java + " " + jvmmem +
 				" " + jvmOpts + " " + jmJvmOpts +
 				" " + logfile + " " + logback + " " + log4j +
-				" " + mainClass + " "  + args + " "+ redirects,
+				" " + mainClass + " "  + args + " " + redirects,
 			clusterDescriptor
 				.setupApplicationMasterContainer(true, true, false)
 				.getCommands().get(0));
 
 		assertEquals(
 			java + " " + jvmmem +
-				" " + jvmOpts + " " + jmJvmOpts + " " + krb5 +// jvmOpts
+				" " + jvmOpts + " " + jmJvmOpts + " " + krb5 + // jvmOpts
 				" " + logfile + " " + logback + " " + log4j +
-				" " + mainClass + " "  + args + " "+ redirects,
+				" " + mainClass + " "  + args + " " + redirects,
 			clusterDescriptor
 				.setupApplicationMasterContainer(true, true, true)
 				.getCommands().get(0));

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/test/java/org/apache/flink/yarn/highavailability/YarnIntraNonHaMasterServicesTest.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/test/java/org/apache/flink/yarn/highavailability/YarnIntraNonHaMasterServicesTest.java b/flink-yarn/src/test/java/org/apache/flink/yarn/highavailability/YarnIntraNonHaMasterServicesTest.java
index b4d2ba8..bdc7863 100644
--- a/flink-yarn/src/test/java/org/apache/flink/yarn/highavailability/YarnIntraNonHaMasterServicesTest.java
+++ b/flink-yarn/src/test/java/org/apache/flink/yarn/highavailability/YarnIntraNonHaMasterServicesTest.java
@@ -26,10 +26,9 @@ import org.apache.flink.runtime.leaderretrieval.LeaderRetrievalListener;
 import org.apache.flink.runtime.leaderretrieval.LeaderRetrievalService;
 import org.apache.flink.util.OperatingSystem;
 import org.apache.flink.util.StringUtils;
-
 import org.apache.flink.util.TestLogger;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
 
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.junit.AfterClass;
 import org.junit.Assume;
 import org.junit.Before;
@@ -37,7 +36,6 @@ import org.junit.BeforeClass;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
-
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
@@ -53,6 +51,9 @@ import static org.mockito.Mockito.timeout;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
+/**
+ * Tests for the {@link YarnIntraNonHaMasterServices}.
+ */
 public class YarnIntraNonHaMasterServicesTest extends TestLogger {
 
 	private static final Random RND = new Random();
@@ -60,9 +61,9 @@ public class YarnIntraNonHaMasterServicesTest extends TestLogger {
 	@ClassRule
 	public static final TemporaryFolder TEMP_DIR = new TemporaryFolder();
 
-	private static MiniDFSCluster HDFS_CLUSTER;
+	private static MiniDFSCluster hdfsCluster;
 
-	private static Path HDFS_ROOT_PATH;
+	private static Path hdfsRootPath;
 
 	private org.apache.hadoop.conf.Configuration hadoopConfig;
 
@@ -80,23 +81,23 @@ public class YarnIntraNonHaMasterServicesTest extends TestLogger {
 		hdConf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, tempDir.getAbsolutePath());
 
 		MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(hdConf);
-		HDFS_CLUSTER = builder.build();
-		HDFS_ROOT_PATH = new Path(HDFS_CLUSTER.getURI());
+		hdfsCluster = builder.build();
+		hdfsRootPath = new Path(hdfsCluster.getURI());
 	}
 
 	@AfterClass
 	public static void destroyHDFS() {
-		if (HDFS_CLUSTER != null) {
-			HDFS_CLUSTER.shutdown();
+		if (hdfsCluster != null) {
+			hdfsCluster.shutdown();
 		}
-		HDFS_CLUSTER = null;
-		HDFS_ROOT_PATH = null;
+		hdfsCluster = null;
+		hdfsRootPath = null;
 	}
 
 	@Before
 	public void initConfig() {
 		hadoopConfig = new org.apache.hadoop.conf.Configuration();
-		hadoopConfig.set(org.apache.hadoop.fs.FileSystem.FS_DEFAULT_NAME_KEY, HDFS_ROOT_PATH.toString());
+		hadoopConfig.set(org.apache.hadoop.fs.FileSystem.FS_DEFAULT_NAME_KEY, hdfsRootPath.toString());
 	}
 
 	// ------------------------------------------------------------------------


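The HDFS_CLUSTER/HDFS_ROOT_PATH renames above follow the checkstyle naming rule that UPPER_SNAKE_CASE is reserved for static final constants; a static field that is reassigned (here in @BeforeClass/@AfterClass) is an ordinary field and takes lowerCamelCase, which is also why the genuinely final TEMP_DIR keeps its name. A small illustrative sketch of the distinction (names are placeholders):

	public class NamingRuleSketch {
		// static AND final: a constant, so the upper-case name is kept
		static final String DEFAULT_FS_KEY = "fs.defaultFS";

		// static but reassigned during the test lifecycle: a plain field,
		// so it takes lowerCamelCase
		static String hdfsRootPath;
	}
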
[13/15] flink git commit: [FLINK-6701] Activate strict checkstyle for flink-yarn

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/test/java/org/apache/flink/yarn/highavailability/YarnPreConfiguredMasterHaServicesTest.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/test/java/org/apache/flink/yarn/highavailability/YarnPreConfiguredMasterHaServicesTest.java b/flink-yarn/src/test/java/org/apache/flink/yarn/highavailability/YarnPreConfiguredMasterHaServicesTest.java
index 07c952a..30c2d1e 100644
--- a/flink-yarn/src/test/java/org/apache/flink/yarn/highavailability/YarnPreConfiguredMasterHaServicesTest.java
+++ b/flink-yarn/src/test/java/org/apache/flink/yarn/highavailability/YarnPreConfiguredMasterHaServicesTest.java
@@ -29,7 +29,6 @@ import org.apache.flink.util.TestLogger;
 import org.apache.flink.yarn.configuration.YarnConfigOptions;
 
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-
 import org.junit.AfterClass;
 import org.junit.Assume;
 import org.junit.Before;
@@ -44,14 +43,17 @@ import java.io.FileNotFoundException;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+/**
+ * Tests for the {@link YarnPreConfiguredMasterNonHaServices}.
+ */
 public class YarnPreConfiguredMasterHaServicesTest extends TestLogger {
 
 	@ClassRule
 	public static final TemporaryFolder TEMP_DIR = new TemporaryFolder();
 
-	private static MiniDFSCluster HDFS_CLUSTER;
+	private static MiniDFSCluster hdfsCluster;
 
-	private static Path HDFS_ROOT_PATH;
+	private static Path hdfsRootPath;
 
 	private org.apache.hadoop.conf.Configuration hadoopConfig;
 
@@ -69,23 +71,23 @@ public class YarnPreConfiguredMasterHaServicesTest extends TestLogger {
 		hdConf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, tempDir.getAbsolutePath());
 
 		MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(hdConf);
-		HDFS_CLUSTER = builder.build();
-		HDFS_ROOT_PATH = new Path(HDFS_CLUSTER.getURI());
+		hdfsCluster = builder.build();
+		hdfsRootPath = new Path(hdfsCluster.getURI());
 	}
 
 	@AfterClass
 	public static void destroyHDFS() {
-		if (HDFS_CLUSTER != null) {
-			HDFS_CLUSTER.shutdown();
+		if (hdfsCluster != null) {
+			hdfsCluster.shutdown();
 		}
-		HDFS_CLUSTER = null;
-		HDFS_ROOT_PATH = null;
+		hdfsCluster = null;
+		hdfsRootPath = null;
 	}
 
 	@Before
 	public void initConfig() {
 		hadoopConfig = new org.apache.hadoop.conf.Configuration();
-		hadoopConfig.set(org.apache.hadoop.fs.FileSystem.FS_DEFAULT_NAME_KEY, HDFS_ROOT_PATH.toString());
+		hadoopConfig.set(org.apache.hadoop.fs.FileSystem.FS_DEFAULT_NAME_KEY, hdfsRootPath.toString());
 	}
 
 	// ------------------------------------------------------------------------
@@ -142,9 +144,9 @@ public class YarnPreConfiguredMasterHaServicesTest extends TestLogger {
 			HighAvailabilityServicesUtils.AddressResolution.NO_ADDRESS_RESOLUTION);
 		services.closeAndCleanupAllData();
 
-		final FileSystem fileSystem = HDFS_ROOT_PATH.getFileSystem();
-		final Path workDir = new Path(HDFS_CLUSTER.getFileSystem().getWorkingDirectory().toString());
-		
+		final FileSystem fileSystem = hdfsRootPath.getFileSystem();
+		final Path workDir = new Path(hdfsCluster.getFileSystem().getWorkingDirectory().toString());
+
 		try {
 			fileSystem.getFileStatus(new Path(workDir, YarnHighAvailabilityServices.FLINK_RECOVERY_DATA_DIR));
 			fail("Flink recovery data directory still exists");
@@ -181,7 +183,6 @@ public class YarnPreConfiguredMasterHaServicesTest extends TestLogger {
 			fail();
 		} catch (UnsupportedOperationException ignored) {}
 
-
 		services.close();
 
 		// all these methods should fail now

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/test/java/org/apache/flink/yarn/messages/NotifyWhenResourcesRegistered.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/test/java/org/apache/flink/yarn/messages/NotifyWhenResourcesRegistered.java b/flink-yarn/src/test/java/org/apache/flink/yarn/messages/NotifyWhenResourcesRegistered.java
index ad5e683..77633ac 100644
--- a/flink-yarn/src/test/java/org/apache/flink/yarn/messages/NotifyWhenResourcesRegistered.java
+++ b/flink-yarn/src/test/java/org/apache/flink/yarn/messages/NotifyWhenResourcesRegistered.java
@@ -18,6 +18,9 @@
 
 package org.apache.flink.yarn.messages;
 
+/**
+ * Test message that causes the sender to be notified when a set number of resources has been registered.
+ */
 public class NotifyWhenResourcesRegistered {
 
 	private final int numberResources;

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/test/java/org/apache/flink/yarn/messages/RequestNumberOfRegisteredResources.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/test/java/org/apache/flink/yarn/messages/RequestNumberOfRegisteredResources.java b/flink-yarn/src/test/java/org/apache/flink/yarn/messages/RequestNumberOfRegisteredResources.java
index ccccbab..f84b794 100644
--- a/flink-yarn/src/test/java/org/apache/flink/yarn/messages/RequestNumberOfRegisteredResources.java
+++ b/flink-yarn/src/test/java/org/apache/flink/yarn/messages/RequestNumberOfRegisteredResources.java
@@ -18,8 +18,11 @@
 
 package org.apache.flink.yarn.messages;
 
+/**
+ * Test message that returns the number of registered resources.
+ */
 public class RequestNumberOfRegisteredResources {
-	public static RequestNumberOfRegisteredResources Instance = new RequestNumberOfRegisteredResources();
+	public static final RequestNumberOfRegisteredResources INSTANCE = new RequestNumberOfRegisteredResources();
 
 	private RequestNumberOfRegisteredResources() {}
 }

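The change above turns the shared instance into a proper constant: public static final, UPPER_SNAKE_CASE, paired with the private constructor that already prevented outside instantiation. This is the usual pattern for stateless actor messages, where every sender reuses one immutable object instead of allocating a request per call. A minimal sketch under illustrative names:

	public final class RequestStatus {
		// single shared instance; the message carries no state
		public static final RequestStatus INSTANCE = new RequestStatus();

		private RequestStatus() {} // no public construction
	}

A caller would then send RequestStatus.INSTANCE to the receiving actor rather than constructing a new message per request.
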

[15/15] flink git commit: [FLINK-6701] Activate strict checkstyle for flink-yarn

Posted by ch...@apache.org.
[FLINK-6701] Activate strict checkstyle for flink-yarn

This closes #3990.


Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/77b0fb9f
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/77b0fb9f
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/77b0fb9f

Branch: refs/heads/master
Commit: 77b0fb9fe3656a5ae7e2ca3bbce28cfa5a0e247e
Parents: d313ac7
Author: zentol <ch...@apache.org>
Authored: Wed May 24 15:10:15 2017 +0200
Committer: zentol <ch...@apache.org>
Committed: Fri May 26 19:17:59 2017 +0200

----------------------------------------------------------------------
 .../yarn/AbstractYarnClusterDescriptor.java     | 165 +++++++-------
 ...bstractYarnFlinkApplicationMasterRunner.java |  21 +-
 .../flink/yarn/RegisteredYarnWorkerNode.java    |   6 +-
 .../main/java/org/apache/flink/yarn/Utils.java  |  72 +++---
 .../flink/yarn/YarnApplicationMasterRunner.java |  58 +++--
 .../apache/flink/yarn/YarnClusterClient.java    |  71 +++---
 .../apache/flink/yarn/YarnClusterClientV2.java  |   8 +-
 .../flink/yarn/YarnClusterDescriptor.java       |   2 +-
 .../flink/yarn/YarnClusterDescriptorV2.java     |   4 +-
 .../org/apache/flink/yarn/YarnConfigKeys.java   |  18 +-
 .../flink/yarn/YarnContainerInLaunch.java       |   3 +-
 .../yarn/YarnFlinkApplicationMasterRunner.java  |  27 +--
 .../flink/yarn/YarnFlinkResourceManager.java    |  64 +++---
 .../apache/flink/yarn/YarnResourceManager.java  |  82 ++++---
 .../YarnResourceManagerCallbackHandler.java     |   8 +-
 .../flink/yarn/YarnTaskExecutorRunner.java      |   9 +-
 .../flink/yarn/YarnTaskManagerRunner.java       |  19 +-
 .../org/apache/flink/yarn/cli/FlinkYarnCLI.java |  99 +++++----
 .../flink/yarn/cli/FlinkYarnSessionCli.java     | 220 +++++++++----------
 .../yarn/configuration/YarnConfigOptions.java   |   7 +-
 .../YarnHighAvailabilityServices.java           |  36 +--
 .../YarnIntraNonHaMasterServices.java           |  16 +-
 .../YarnPreConfiguredMasterNonHaServices.java   |  12 +-
 .../yarn/messages/ContainersAllocated.java      |  11 +-
 .../flink/yarn/messages/ContainersComplete.java |  12 +-
 flink-yarn/src/main/resources/log4j.properties  |   1 -
 .../apache/flink/yarn/ApplicationClient.scala   |   6 +-
 .../org/apache/flink/yarn/YarnJobManager.scala  |   1 -
 .../org/apache/flink/yarn/YarnMessages.scala    |   5 +-
 .../org/apache/flink/yarn/YarnTaskManager.scala |   2 +-
 .../yarn/TestingYarnFlinkResourceManager.java   |   6 +-
 .../java/org/apache/flink/yarn/UtilsTest.java   |  29 ++-
 .../yarn/YarnApplicationMasterRunnerTest.java   |  15 +-
 .../flink/yarn/YarnClusterDescriptorTest.java   |  27 ++-
 .../YarnIntraNonHaMasterServicesTest.java       |  25 ++-
 .../YarnPreConfiguredMasterHaServicesTest.java  |  29 +--
 .../messages/NotifyWhenResourcesRegistered.java |   3 +
 .../RequestNumberOfRegisteredResources.java     |   5 +-
 38 files changed, 611 insertions(+), 593 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/AbstractYarnClusterDescriptor.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/AbstractYarnClusterDescriptor.java b/flink-yarn/src/main/java/org/apache/flink/yarn/AbstractYarnClusterDescriptor.java
index b9a4416..2315c70 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/AbstractYarnClusterDescriptor.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/AbstractYarnClusterDescriptor.java
@@ -34,6 +34,7 @@ import org.apache.flink.runtime.jobgraph.JobGraph;
 import org.apache.flink.runtime.jobmanager.HighAvailabilityMode;
 import org.apache.flink.util.Preconditions;
 import org.apache.flink.yarn.configuration.YarnConfigOptions;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -109,6 +110,13 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 
 	/**
-	 * If the user has specified a different number of slots, we store them here
+	 * Files (usually in a distributed file system) used for the YARN session of Flink.
+	 * Contains configuration files and jar files.
+	 */
+	private Path sessionFilesDir;
+
+	/**
+	 * If the user has specified a different number of slots, we store them here.
 	 */
 	private int slots = -1;
 
@@ -128,7 +136,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 
 	private String dynamicPropertiesEncoded;
 
-	/** Lazily initialized list of files to ship */
+	/** Lazily initialized list of files to ship. */
 	protected List<File> shipFiles = new LinkedList<>();
 
 	private org.apache.flink.configuration.Configuration flinkConfiguration;
@@ -140,18 +148,18 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 	private String zookeeperNamespace;
 
 	/** Optional Jar file to include in the system class loader of all application nodes
-	 * (for per-job submission) */
+	 * (for per-job submission). */
 	private final Set<File> userJarFiles = new HashSet<>();
 
 	private YarnConfigOptions.UserJarInclusion userJarInclusion;
 
 	public AbstractYarnClusterDescriptor() {
 		// for unit tests only
-		if(System.getenv("IN_TESTS") != null) {
+		if (System.getenv("IN_TESTS") != null) {
 			try {
 				conf.addResource(new File(System.getenv("YARN_CONF_DIR") + "/yarn-site.xml").toURI().toURL());
 			} catch (Throwable t) {
-				throw new RuntimeException("Error",t);
+				throw new RuntimeException("Error", t);
 			}
 		}
 
@@ -183,17 +191,17 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 	protected abstract Class<?> getApplicationMasterClass();
 
 	public void setJobManagerMemory(int memoryMb) {
-		if(memoryMb < MIN_JM_MEMORY) {
+		if (memoryMb < MIN_JM_MEMORY) {
 			throw new IllegalArgumentException("The JobManager memory (" + memoryMb + ") is below the minimum required memory amount "
-				+ "of " + MIN_JM_MEMORY+ " MB");
+				+ "of " + MIN_JM_MEMORY + " MB");
 		}
 		this.jobManagerMemoryMb = memoryMb;
 	}
 
 	public void setTaskManagerMemory(int memoryMb) {
-		if(memoryMb < MIN_TM_MEMORY) {
+		if (memoryMb < MIN_TM_MEMORY) {
 			throw new IllegalArgumentException("The TaskManager memory (" + memoryMb + ") is below the minimum required memory amount "
-				+ "of " + MIN_TM_MEMORY+ " MB");
+				+ "of " + MIN_TM_MEMORY + " MB");
 		}
 		this.taskManagerMemoryMb = memoryMb;
 	}
@@ -209,7 +217,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 	}
 
 	public void setTaskManagerSlots(int slots) {
-		if(slots <= 0) {
+		if (slots <= 0) {
 			throw new IllegalArgumentException("Number of TaskManager slots must be positive");
 		}
 		this.slots = slots;
@@ -224,7 +232,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 	}
 
 	public void setLocalJarPath(Path localJarPath) {
-		if(!localJarPath.toString().endsWith("jar")) {
+		if (!localJarPath.toString().endsWith("jar")) {
 			throw new IllegalArgumentException("The passed jar path ('" + localJarPath + "') does not end with the 'jar' extension");
 		}
 		this.flinkJarPath = localJarPath;
@@ -239,7 +247,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 	}
 
 	public void setTaskManagerCount(int tmCount) {
-		if(tmCount < 1) {
+		if (tmCount < 1) {
 			throw new IllegalArgumentException("The TaskManager count has to be at least 1.");
 		}
 		this.taskManagerCount = tmCount;
@@ -253,7 +261,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 		for (File shipFile: shipFiles) {
 			// remove uberjar from ship list (by default everything in the lib/ folder is added to
 			// the list of files to ship, but we handle the uberjar separately.
-			if(!(shipFile.getName().startsWith("flink-dist") && shipFile.getName().endsWith("jar"))) {
+			if (!(shipFile.getName().startsWith("flink-dist") && shipFile.getName().endsWith("jar"))) {
 				this.shipFiles.add(shipFile);
 			}
 		}
@@ -274,7 +282,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 			return false;
 		}
 		try {
-			for(URL jarFile : requiredJarFiles) {
+			for (URL jarFile : requiredJarFiles) {
 				if (!userJarFiles.contains(new File(jarFile.toURI()))) {
 					return false;
 				}
@@ -303,21 +311,20 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 		return this.dynamicPropertiesEncoded;
 	}
 
-
 	private void isReadyForDeployment() throws YarnDeploymentException {
-		if(taskManagerCount <= 0) {
+		if (taskManagerCount <= 0) {
 			throw new YarnDeploymentException("Taskmanager count must be positive");
 		}
-		if(this.flinkJarPath == null) {
+		if (this.flinkJarPath == null) {
 			throw new YarnDeploymentException("The Flink jar path is null");
 		}
-		if(this.configurationDirectory == null) {
+		if (this.configurationDirectory == null) {
 			throw new YarnDeploymentException("Configuration directory not set");
 		}
-		if(this.flinkConfigurationPath == null) {
+		if (this.flinkConfigurationPath == null) {
 			throw new YarnDeploymentException("Configuration path not set");
 		}
-		if(this.flinkConfiguration == null) {
+		if (this.flinkConfiguration == null) {
 			throw new YarnDeploymentException("Flink configuration object has not been set");
 		}
 
@@ -337,7 +344,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 		}
 
 		// check if required Hadoop environment variables are set. If not, warn user
-		if(System.getenv("HADOOP_CONF_DIR") == null &&
+		if (System.getenv("HADOOP_CONF_DIR") == null &&
 			System.getenv("YARN_CONF_DIR") == null) {
 			LOG.warn("Neither the HADOOP_CONF_DIR nor the YARN_CONF_DIR environment variable is set. " +
 				"The Flink YARN Client needs one of these to be set to properly load the Hadoop " +
@@ -346,8 +353,8 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 	}
 
 	private static boolean allocateResource(int[] nodeManagers, int toAllocate) {
-		for(int i = 0; i < nodeManagers.length; i++) {
-			if(nodeManagers[i] >= toAllocate) {
+		for (int i = 0; i < nodeManagers.length; i++) {
+			if (nodeManagers[i] >= toAllocate) {
 				nodeManagers[i] -= toAllocate;
 				return true;
 			}
@@ -372,7 +379,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 	}
 
 	/**
-	 * Gets a Hadoop Yarn client
+	 * Gets a Hadoop Yarn client.
 	 * @return Returns a YarnClient which has to be shutdown manually
 	 */
 	protected YarnClient getYarnClient() {
@@ -420,7 +427,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 	@Override
 	public YarnClusterClient deploy() {
 		try {
-			if(UserGroupInformation.isSecurityEnabled()) {
+			if (UserGroupInformation.isSecurityEnabled()) {
 				// note: UGI::hasKerberosCredentials inaccurately reports false
 				// for logins based on a keytab (fixed in Hadoop 2.6.1, see HADOOP-10786),
 				// so we check only in ticket cache scenario.
@@ -453,7 +460,6 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 
 		final YarnClient yarnClient = getYarnClient();
 
-
 		// ------------------ Check if the specified queue exists --------------------
 
 		try {
@@ -477,9 +483,9 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 			} else {
 				LOG.debug("The YARN cluster does not have any queues configured");
 			}
-		} catch(Throwable e) {
+		} catch (Throwable e) {
 			LOG.warn("Error while getting queue information from YARN: " + e.getMessage());
-			if(LOG.isDebugEnabled()) {
+			if (LOG.isDebugEnabled()) {
 				LOG.debug("Error details", e);
 			}
 		}
@@ -495,7 +501,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 		// the yarnMinAllocationMB specifies the smallest possible container allocation size.
 		// all allocations below this value are automatically set to this value.
 		final int yarnMinAllocationMB = conf.getInt("yarn.scheduler.minimum-allocation-mb", 0);
-		if(jobManagerMemoryMb < yarnMinAllocationMB || taskManagerMemoryMb < yarnMinAllocationMB) {
+		if (jobManagerMemoryMb < yarnMinAllocationMB || taskManagerMemoryMb < yarnMinAllocationMB) {
 			LOG.warn("The JobManager or TaskManager memory is below the smallest possible YARN Container size. "
 				+ "The value of 'yarn.scheduler.minimum-allocation-mb' is '" + yarnMinAllocationMB + "'. Please increase the memory size." +
 				"YARN will allocate the smaller containers but the scheduler will account for the minimum-allocation-mb, maybe not all instances " +
@@ -503,10 +509,10 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 		}
 
 		// set the memory to minAllocationMB to do the next checks correctly
-		if(jobManagerMemoryMb < yarnMinAllocationMB) {
+		if (jobManagerMemoryMb < yarnMinAllocationMB) {
 			jobManagerMemoryMb =  yarnMinAllocationMB;
 		}
-		if(taskManagerMemoryMb < yarnMinAllocationMB) {
+		if (taskManagerMemoryMb < yarnMinAllocationMB) {
 			taskManagerMemoryMb =  yarnMinAllocationMB;
 		}
 
@@ -515,56 +521,56 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 		GetNewApplicationResponse appResponse = yarnApplication.getNewApplicationResponse();
 
 		Resource maxRes = appResponse.getMaximumResourceCapability();
-		final String NOTE = "Please check the 'yarn.scheduler.maximum-allocation-mb' and the 'yarn.nodemanager.resource.memory-mb' configuration values\n";
-		if(jobManagerMemoryMb > maxRes.getMemory() ) {
+		final String note = "Please check the 'yarn.scheduler.maximum-allocation-mb' and the 'yarn.nodemanager.resource.memory-mb' configuration values\n";
+		if (jobManagerMemoryMb > maxRes.getMemory()) {
 			failSessionDuringDeployment(yarnClient, yarnApplication);
 			throw new YarnDeploymentException("The cluster does not have the requested resources for the JobManager available!\n"
-				+ "Maximum Memory: " + maxRes.getMemory() + "MB Requested: " + jobManagerMemoryMb + "MB. " + NOTE);
+				+ "Maximum Memory: " + maxRes.getMemory() + "MB Requested: " + jobManagerMemoryMb + "MB. " + note);
 		}
 
-		if(taskManagerMemoryMb > maxRes.getMemory() ) {
+		if (taskManagerMemoryMb > maxRes.getMemory()) {
 			failSessionDuringDeployment(yarnClient, yarnApplication);
 			throw new YarnDeploymentException("The cluster does not have the requested resources for the TaskManagers available!\n"
-				+ "Maximum Memory: " + maxRes.getMemory() + " Requested: " + taskManagerMemoryMb + "MB. " + NOTE);
+				+ "Maximum Memory: " + maxRes.getMemory() + " Requested: " + taskManagerMemoryMb + "MB. " + note);
 		}
 
-		final String NOTE_RSC = "\nThe Flink YARN client will try to allocate the YARN session, but maybe not all TaskManagers are " +
+		final String noteRsc = "\nThe Flink YARN client will try to allocate the YARN session, but maybe not all TaskManagers are " +
 			"connecting from the beginning because the resources are currently not available in the cluster. " +
 			"The allocation might take more time than usual because the Flink YARN client needs to wait until " +
 			"the resources become available.";
 		int totalMemoryRequired = jobManagerMemoryMb + taskManagerMemoryMb * taskManagerCount;
 		ClusterResourceDescription freeClusterMem = getCurrentFreeClusterResources(yarnClient);
-		if(freeClusterMem.totalFreeMemory < totalMemoryRequired) {
+		if (freeClusterMem.totalFreeMemory < totalMemoryRequired) {
 			LOG.warn("This YARN session requires " + totalMemoryRequired + "MB of memory in the cluster. "
-				+ "There are currently only " + freeClusterMem.totalFreeMemory + "MB available." + NOTE_RSC);
+				+ "There are currently only " + freeClusterMem.totalFreeMemory + "MB available." + noteRsc);
 
 		}
-		if(taskManagerMemoryMb > freeClusterMem.containerLimit) {
+		if (taskManagerMemoryMb > freeClusterMem.containerLimit) {
 			LOG.warn("The requested amount of memory for the TaskManagers (" + taskManagerMemoryMb + "MB) is more than "
-				+ "the largest possible YARN container: " + freeClusterMem.containerLimit + NOTE_RSC);
+				+ "the largest possible YARN container: " + freeClusterMem.containerLimit + noteRsc);
 		}
-		if(jobManagerMemoryMb > freeClusterMem.containerLimit) {
+		if (jobManagerMemoryMb > freeClusterMem.containerLimit) {
 			LOG.warn("The requested amount of memory for the JobManager (" + jobManagerMemoryMb + "MB) is more than "
-				+ "the largest possible YARN container: " + freeClusterMem.containerLimit + NOTE_RSC);
+				+ "the largest possible YARN container: " + freeClusterMem.containerLimit + noteRsc);
 		}
 
 		// ----------------- check if the requested containers fit into the cluster.
 
 		int[] nmFree = Arrays.copyOf(freeClusterMem.nodeManagersFree, freeClusterMem.nodeManagersFree.length);
 		// first, allocate the jobManager somewhere.
-		if(!allocateResource(nmFree, jobManagerMemoryMb)) {
+		if (!allocateResource(nmFree, jobManagerMemoryMb)) {
 			LOG.warn("Unable to find a NodeManager that can fit the JobManager/Application master. " +
 				"The JobManager requires " + jobManagerMemoryMb + "MB. NodeManagers available: " +
-				Arrays.toString(freeClusterMem.nodeManagersFree) + NOTE_RSC);
+				Arrays.toString(freeClusterMem.nodeManagersFree) + noteRsc);
 		}
 		// allocate TaskManagers
-		for(int i = 0; i < taskManagerCount; i++) {
-			if(!allocateResource(nmFree, taskManagerMemoryMb)) {
+		for (int i = 0; i < taskManagerCount; i++) {
+			if (!allocateResource(nmFree, taskManagerMemoryMb)) {
 				LOG.warn("There is not enough memory available in the YARN cluster. " +
 					"The TaskManager(s) require " + taskManagerMemoryMb + "MB each. " +
 					"NodeManagers available: " + Arrays.toString(freeClusterMem.nodeManagersFree) + "\n" +
 					"After allocating the JobManager (" + jobManagerMemoryMb + "MB) and (" + i + "/" + taskManagerCount + ") TaskManagers, " +
-					"the following NodeManagers are available: " + Arrays.toString(nmFree)  + NOTE_RSC );
+					"the following NodeManagers are available: " + Arrays.toString(nmFree)  + noteRsc);
 			}
 		}
 
@@ -669,7 +675,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 		// ship list that enables reuse of resources for task manager containers
 		StringBuilder envShipFileList = new StringBuilder();
 
-		// upload and register ship files	
+		// upload and register ship files
 		List<String> systemClassPaths = uploadAndRegisterFiles(systemShipFiles, fs, appId.toString(), paths, localResources, envShipFileList);
 
 		List<String> userClassPaths;
@@ -752,9 +758,9 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 		Path remoteKrb5Path = null;
 		Path remoteYarnSiteXmlPath = null;
 		boolean hasKrb5 = false;
-		if(System.getenv("IN_TESTS") != null) {
+		if (System.getenv("IN_TESTS") != null) {
 			String krb5Config = System.getProperty("java.security.krb5.conf");
-			if(krb5Config != null && krb5Config.length() != 0) {
+			if (krb5Config != null && krb5Config.length() != 0) {
 				File krb5 = new File(krb5Config);
 				LOG.info("Adding KRB5 configuration {} to the AM container local resource bucket", krb5.getAbsolutePath());
 				LocalResource krb5ConfResource = Records.newRecord(LocalResource.class);
@@ -762,7 +768,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 				remoteKrb5Path = Utils.setupLocalResource(fs, appId.toString(), krb5ConfPath, krb5ConfResource, fs.getHomeDirectory());
 				localResources.put(Utils.KRB5_FILE_NAME, krb5ConfResource);
 
-				File f = new File(System.getenv("YARN_CONF_DIR"),Utils.YARN_SITE_FILE_NAME);
+				File f = new File(System.getenv("YARN_CONF_DIR"), Utils.YARN_SITE_FILE_NAME);
 				LOG.info("Adding Yarn configuration {} to the AM container local resource bucket", f.getAbsolutePath());
 				LocalResource yarnConfResource = Records.newRecord(LocalResource.class);
 				Path yarnSitePath = new Path(f.getAbsolutePath());
@@ -777,7 +783,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 		LocalResource keytabResource = null;
 		Path remotePathKeytab = null;
 		String keytab = flinkConfiguration.getString(SecurityOptions.KERBEROS_LOGIN_KEYTAB);
-		if(keytab != null) {
+		if (keytab != null) {
 			LOG.info("Adding keytab {} to the AM container local resource bucket", keytab);
 			keytabResource = Records.newRecord(LocalResource.class);
 			Path keytabPath = new Path(keytab);
@@ -787,7 +793,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 
 		final ContainerLaunchContext amContainer = setupApplicationMasterContainer(hasLogback, hasLog4j, hasKrb5);
 
-		if ( UserGroupInformation.isSecurityEnabled() && keytab == null ) {
+		if (UserGroupInformation.isSecurityEnabled() && keytab == null) {
 			//set tokens only when keytab is not provided
 			LOG.info("Adding delegation token to the AM container..");
 			Utils.setTokensFor(amContainer, paths, conf);
@@ -806,7 +812,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 		// set Flink on YARN internal configuration values
 		appMasterEnv.put(YarnConfigKeys.ENV_TM_COUNT, String.valueOf(taskManagerCount));
 		appMasterEnv.put(YarnConfigKeys.ENV_TM_MEMORY, String.valueOf(taskManagerMemoryMb));
-		appMasterEnv.put(YarnConfigKeys.FLINK_JAR_PATH, remotePathJar.toString() );
+		appMasterEnv.put(YarnConfigKeys.FLINK_JAR_PATH, remotePathJar.toString());
 		appMasterEnv.put(YarnConfigKeys.ENV_APP_ID, appId.toString());
 		appMasterEnv.put(YarnConfigKeys.ENV_CLIENT_HOME_DIR, fs.getHomeDirectory().toString());
 		appMasterEnv.put(YarnConfigKeys.ENV_CLIENT_SHIP_FILES, envShipFileList.toString());
@@ -818,19 +824,19 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 		// https://github.com/apache/hadoop/blob/trunk/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/YarnApplicationSecurity.md#identity-on-an-insecure-cluster-hadoop_user_name
 		appMasterEnv.put(YarnConfigKeys.ENV_HADOOP_USER_NAME, UserGroupInformation.getCurrentUser().getUserName());
 
-		if(keytabResource != null) {
-			appMasterEnv.put(YarnConfigKeys.KEYTAB_PATH, remotePathKeytab.toString() );
+		if (keytabResource != null) {
+			appMasterEnv.put(YarnConfigKeys.KEYTAB_PATH, remotePathKeytab.toString());
 			String principal = flinkConfiguration.getString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL);
-			appMasterEnv.put(YarnConfigKeys.KEYTAB_PRINCIPAL, principal );
+			appMasterEnv.put(YarnConfigKeys.KEYTAB_PRINCIPAL, principal);
 		}
 
 		//To support Yarn Secure Integration Test Scenario
-		if(remoteYarnSiteXmlPath != null && remoteKrb5Path != null) {
+		if (remoteYarnSiteXmlPath != null && remoteKrb5Path != null) {
 			appMasterEnv.put(YarnConfigKeys.ENV_YARN_SITE_XML_PATH, remoteYarnSiteXmlPath.toString());
-			appMasterEnv.put(YarnConfigKeys.ENV_KRB5_PATH, remoteKrb5Path.toString() );
+			appMasterEnv.put(YarnConfigKeys.ENV_KRB5_PATH, remoteKrb5Path.toString());
 		}
 
-		if(dynamicPropertiesEncoded != null) {
+		if (dynamicPropertiesEncoded != null) {
 			appMasterEnv.put(YarnConfigKeys.ENV_DYNAMIC_PROPERTIES, dynamicPropertiesEncoded);
 		}
 
@@ -845,9 +851,9 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 		capability.setVirtualCores(1);
 
 		String name;
-		if(customName == null) {
+		if (customName == null) {
 			name = "Flink session with " + taskManagerCount + " TaskManagers";
-			if(detached) {
+			if (detached) {
 				name += " (detached)";
 			}
 		} else {
@@ -858,7 +864,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 		appContext.setApplicationType("Apache Flink");
 		appContext.setAMContainerSpec(amContainer);
 		appContext.setResource(capability);
-		if(yarnQueue != null) {
+		if (yarnQueue != null) {
 			appContext.setQueue(yarnQueue);
 		}
 
@@ -874,7 +880,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 		final long startTime = System.currentTimeMillis();
 		ApplicationReport report;
 		YarnApplicationState lastAppState = YarnApplicationState.NEW;
-		loop: while( true ) {
+		loop: while (true) {
 			try {
 				report = yarnClient.getApplicationReport(appId);
 			} catch (IOException e) {
@@ -899,7 +905,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 					if (appState != lastAppState) {
 						LOG.info("Deploying cluster, current state " + appState);
 					}
-					if(System.currentTimeMillis() - startTime > 60000) {
+					if (System.currentTimeMillis() - startTime > 60000) {
 						LOG.info("Deployment took more than 60 seconds. Please check if the requested resources are available in the YARN cluster");
 					}
 
@@ -922,7 +928,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 		}
 		return report;
 	}
-	
+
 	private static List<String> uploadAndRegisterFiles(
 			Collection<File> shipFiles,
 			FileSystem fs,
@@ -971,7 +977,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 	/**
 	 * Kills YARN application and stops YARN client.
 	 *
-	 * Use this method to kill the App before it has been properly deployed
+	 * <p>Use this method to kill the App before it has been properly deployed.
 	 */
 	private void failSessionDuringDeployment(YarnClient yarnClient, YarnClientApplication yarnApplication) {
 		LOG.info("Killing YARN application");
@@ -986,11 +992,10 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 		yarnClient.stop();
 	}
 
-
 	private static class ClusterResourceDescription {
-		final public int totalFreeMemory;
-		final public int containerLimit;
-		final public int[] nodeManagersFree;
+		public final int totalFreeMemory;
+		public final int containerLimit;
+		public final int[] nodeManagersFree;
 
 		public ClusterResourceDescription(int totalFreeMemory, int containerLimit, int[] nodeManagersFree) {
 			this.totalFreeMemory = totalFreeMemory;
@@ -1006,12 +1011,12 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 		int containerLimit = 0;
 		int[] nodeManagersFree = new int[nodes.size()];
 
-		for(int i = 0; i < nodes.size(); i++) {
+		for (int i = 0; i < nodes.size(); i++) {
 			NodeReport rep = nodes.get(i);
-			int free = rep.getCapability().getMemory() - (rep.getUsed() != null ? rep.getUsed().getMemory() : 0 );
+			int free = rep.getCapability().getMemory() - (rep.getUsed() != null ? rep.getUsed().getMemory() : 0);
 			nodeManagersFree[i] = free;
 			totalFreeMemory += free;
-			if(free > containerLimit) {
+			if (free > containerLimit) {
 				containerLimit = free;
 			}
 		}
@@ -1060,7 +1065,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 	}
 
 	public void setName(String name) {
-		if(name == null) {
+		if (name == null) {
 			throw new IllegalArgumentException("The passed name is null");
 		}
 		customName = name;
@@ -1098,9 +1103,9 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 	 * Singleton object which uses reflection to determine whether the {@link ApplicationSubmissionContext}
 	 * supports various methods which, depending on the Hadoop version, may or may not be supported.
 	 *
-	 * If an unsupported method is invoked, nothing happens.
+	 * <p>If an unsupported method is invoked, nothing happens.
 	 *
-	 * Currently three methods are proxied:
+	 * <p>Currently three methods are proxied:
 	 * - setApplicationTags (>= 2.4.0)
 	 * - setAttemptFailuresValidityInterval (>= 2.6.0)
 	 * - setKeepContainersAcrossApplicationAttempts (>= 2.4.0)
@@ -1302,11 +1307,11 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 		if (hasLogback || hasLog4j) {
 			logging = "-Dlog.file=\"" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/jobmanager.log\"";
 
-			if(hasLogback) {
+			if (hasLogback) {
 				logging += " -Dlogback.configurationFile=file:" + CONFIG_FILE_LOGBACK_NAME;
 			}
 
-			if(hasLog4j) {
+			if (hasLog4j) {
 				logging += " -Dlog4j.configuration=file:" + CONFIG_FILE_LOG4J_NAME;
 			}
 		}
@@ -1345,7 +1350,7 @@ public abstract class AbstractYarnClusterDescriptor implements ClusterDescriptor
 	}
 
 	/**
-	 * Creates a YarnClusterClient; may be overriden in tests
+	 * Creates a YarnClusterClient; may be overridden in tests.
 	 */
 	protected YarnClusterClient createYarnClusterClient(
 			AbstractYarnClusterDescriptor descriptor,

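The javadoc near the end of the diff above describes a reflection-based compatibility shim around ApplicationSubmissionContext: the three methods are looked up once via reflection and silently skipped on Hadoop versions that lack them. A condensed, self-contained sketch of that pattern (not Flink's exact code; it probes the real setApplicationTags method, available from Hadoop 2.4.0):

	import java.lang.reflect.Method;
	import java.util.Set;

	final class ApplicationTagsCompat {
		// looked up once; null on Hadoop versions without the method
		private static final Method SET_TAGS = lookup();

		private static Method lookup() {
			try {
				return Class.forName(
						"org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext")
					.getMethod("setApplicationTags", Set.class);
			} catch (ClassNotFoundException | NoSuchMethodException e) {
				return null; // older Hadoop: the call below becomes a no-op
			}
		}

		static void setTags(Object submissionContext, Set<String> tags) throws Exception {
			if (SET_TAGS != null) {
				SET_TAGS.invoke(submissionContext, tags);
			}
		}
	}
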
http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/AbstractYarnFlinkApplicationMasterRunner.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/AbstractYarnFlinkApplicationMasterRunner.java b/flink-yarn/src/main/java/org/apache/flink/yarn/AbstractYarnFlinkApplicationMasterRunner.java
index 4b24f42..8bf6a2e 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/AbstractYarnFlinkApplicationMasterRunner.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/AbstractYarnFlinkApplicationMasterRunner.java
@@ -27,10 +27,10 @@ import org.apache.flink.runtime.clusterframework.BootstrapTools;
 import org.apache.flink.runtime.security.SecurityUtils;
 import org.apache.flink.util.Preconditions;
 import org.apache.flink.yarn.cli.FlinkYarnSessionCli;
+
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -43,21 +43,20 @@ import java.util.concurrent.Callable;
  * It starts actor system and the actors for {@link org.apache.flink.runtime.jobmaster.JobMaster}
  * and {@link YarnResourceManager}.
  *
- * The JobMasters handles Flink job execution, while the YarnResourceManager handles container
+ * <p>The JobMaster handles Flink job execution, while the YarnResourceManager handles container
  * allocation and failure detection.
  */
 public abstract class AbstractYarnFlinkApplicationMasterRunner {
 
-	/** Logger */
 	protected static final Logger LOG = LoggerFactory.getLogger(AbstractYarnFlinkApplicationMasterRunner.class);
 
-	/** The process environment variables */
+	/** The process environment variables. */
 	protected static final Map<String, String> ENV = System.getenv();
 
-	/** The exit code returned if the initialization of the application master failed */
+	/** The exit code returned if the initialization of the application master failed. */
 	protected static final int INIT_ERROR_EXIT_CODE = 31;
 
-	/** The host name passed by env */
+	/** The host name passed by env. */
 	protected String appMasterHostname;
 
 	/**
@@ -87,7 +86,7 @@ public abstract class AbstractYarnFlinkApplicationMasterRunner {
 			LOG.info("Remote keytab principal obtained {}", remoteKeytabPrincipal);
 
 			String keytabPath = null;
-			if(remoteKeytabPath != null) {
+			if (remoteKeytabPath != null) {
 				File f = new File(currDir, Utils.KEYTAB_FILE_NAME);
 				keytabPath = f.getAbsolutePath();
 				LOG.debug("Keytab path: {}", keytabPath);
@@ -96,7 +95,7 @@ public abstract class AbstractYarnFlinkApplicationMasterRunner {
 			UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
 
 			LOG.info("YARN daemon is running as: {} Yarn client user obtainer: {}",
-					currentUser.getShortUserName(), yarnClientUsername );
+					currentUser.getShortUserName(), yarnClientUsername);
 
 			// Flink configuration
 			final Map<String, String> dynamicProperties =
@@ -122,7 +121,7 @@ public abstract class AbstractYarnFlinkApplicationMasterRunner {
 			}
 
 			SecurityUtils.SecurityConfiguration sc;
-			if(hadoopConfiguration != null) {
+			if (hadoopConfiguration != null) {
 				sc = new SecurityUtils.SecurityConfiguration(flinkConfig, hadoopConfiguration);
 			} else {
 				sc = new SecurityUtils.SecurityConfiguration(flinkConfig);
@@ -170,7 +169,7 @@ public abstract class AbstractYarnFlinkApplicationMasterRunner {
 	/**
 	 * @param baseDirectory  The working directory
 	 * @param additional Additional parameters
-	 * 
+	 *
 	 * @return The configuration to be used by the TaskExecutors.
 	 */
 	private static Configuration createConfiguration(String baseDirectory, Map<String, String> additional) {
@@ -194,7 +193,7 @@ public abstract class AbstractYarnFlinkApplicationMasterRunner {
 			configuration.setInteger(ConfigConstants.JOB_MANAGER_WEB_PORT_KEY, 0);
 		}
 
-		// if the user has set the deprecated YARN-specific config keys, we add the 
+		// if the user has set the deprecated YARN-specific config keys, we add the
 		// corresponding generic config keys instead. that way, later code needs not
 		// deal with deprecated config keys
 

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/RegisteredYarnWorkerNode.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/RegisteredYarnWorkerNode.java b/flink-yarn/src/main/java/org/apache/flink/yarn/RegisteredYarnWorkerNode.java
index cb2f40a..5f059bf 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/RegisteredYarnWorkerNode.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/RegisteredYarnWorkerNode.java
@@ -19,8 +19,8 @@
 package org.apache.flink.yarn;
 
 import org.apache.flink.runtime.clusterframework.types.ResourceID;
-
 import org.apache.flink.runtime.clusterframework.types.ResourceIDRetrievable;
+
 import org.apache.hadoop.yarn.api.records.Container;
 
 import static java.util.Objects.requireNonNull;
@@ -30,10 +30,10 @@ import static java.util.Objects.requireNonNull;
  */
 public class RegisteredYarnWorkerNode implements ResourceIDRetrievable {
 
-	/** The container on which the worker runs */
+	/** The container on which the worker runs. */
 	private final Container yarnContainer;
 
-	/** The resource id associated with this worker type */
+	/** The resource id associated with this worker type. */
 	private final ResourceID resourceID;
 
 	public RegisteredYarnWorkerNode(Container yarnContainer) {

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/Utils.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/Utils.java b/flink-yarn/src/main/java/org/apache/flink/yarn/Utils.java
index 60f7204..698b69e 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/Utils.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/Utils.java
@@ -18,25 +18,9 @@
 
 package org.apache.flink.yarn;
 
-import java.io.File;
-import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
-import java.nio.ByteBuffer;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-
+import org.apache.flink.configuration.ConfigConstants;
 import org.apache.flink.runtime.clusterframework.BootstrapTools;
 import org.apache.flink.runtime.clusterframework.ContaineredTaskManagerParameters;
-import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.hadoop.yarn.util.Records;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import org.apache.flink.configuration.ConfigConstants;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
@@ -50,6 +34,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.StringInterner;
+import org.apache.hadoop.yarn.api.ApplicationConstants;
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
 import org.apache.hadoop.yarn.api.records.LocalResource;
@@ -57,6 +42,20 @@ import org.apache.hadoop.yarn.api.records.LocalResourceType;
 import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.util.ConverterUtils;
+import org.apache.hadoop.yarn.util.Records;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.nio.ByteBuffer;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
 
 import static org.apache.flink.yarn.YarnConfigKeys.ENV_FLINK_CLASSPATH;
 
@@ -64,20 +63,20 @@ import static org.apache.flink.yarn.YarnConfigKeys.ENV_FLINK_CLASSPATH;
  * Utility class that provides helper methods to work with Apache Hadoop YARN.
  */
 public final class Utils {
-	
+
 	private static final Logger LOG = LoggerFactory.getLogger(Utils.class);
 
-	/** Keytab file name populated in YARN container */
+	/** Keytab file name populated in YARN container. */
 	public static final String KEYTAB_FILE_NAME = "krb5.keytab";
 
-	/** KRB5 file name populated in YARN container for secure IT run */
+	/** KRB5 file name populated in YARN container for secure IT run. */
 	public static final String KRB5_FILE_NAME = "krb5.conf";
 
-	/** Yarn site xml file name populated in YARN container for secure IT run */
+	/** Yarn site xml file name populated in YARN container for secure IT run. */
 	public static final String YARN_SITE_FILE_NAME = "yarn-site.xml";
 
 	/**
-	 * See documentation
+	 * Calculates the heap size to use for a container, i.e. the container memory minus the configured off-heap cutoff.
 	 */
 	public static int calculateHeapSize(int memory, org.apache.flink.configuration.Configuration conf) {
 
@@ -102,14 +101,13 @@ public final class Utils {
 				+ "' is higher (" + minCutoff + ") than the requested amount of memory " + memory);
 		}
 
-		int heapLimit = (int)((float)memory * memoryCutoffRatio);
+		int heapLimit = (int) ((float) memory * memoryCutoffRatio);
 		if (heapLimit < minCutoff) {
 			heapLimit = minCutoff;
 		}
 		return memory - heapLimit;
 	}
 
-
 	public static void setupYarnClassPath(Configuration conf, Map<String, String> appMasterEnv) {
 		addToEnvironment(
 			appMasterEnv,
@@ -123,9 +121,7 @@ public final class Utils {
 		}
 	}
 
-
 	/**
-	 * 
 	 * @return Path to remote file (usually hdfs)
 	 * @throws IOException
 	 */
@@ -165,7 +161,7 @@ public final class Utils {
 		UserGroupInformation currUsr = UserGroupInformation.getCurrentUser();
 
 		Collection<Token<? extends TokenIdentifier>> usrTok = currUsr.getTokens();
-		for(Token<? extends TokenIdentifier> token : usrTok) {
+		for (Token<? extends TokenIdentifier> token : usrTok) {
 			final Text id = new Text(token.getIdentifier());
 			LOG.info("Adding user token " + id + " with " + token);
 			credentials.addToken(id, token);
@@ -173,7 +169,7 @@ public final class Utils {
 		try (DataOutputBuffer dob = new DataOutputBuffer()) {
 			credentials.writeTokenStorageToStream(dob);
 
-			if(LOG.isDebugEnabled()) {
+			if (LOG.isDebugEnabled()) {
 				LOG.debug("Wrote tokens. Credentials buffer length: " + dob.getLength());
 			}
 
@@ -193,7 +189,7 @@ public final class Utils {
 				// Intended call: HBaseConfiguration.addHbaseResources(conf);
 				Class
 						.forName("org.apache.hadoop.hbase.HBaseConfiguration")
-						.getMethod("addHbaseResources", Configuration.class )
+						.getMethod("addHbaseResources", Configuration.class)
 						.invoke(null, conf);
 				// ----
 
@@ -220,7 +216,7 @@ public final class Utils {
 
 				credentials.addToken(token.getService(), token);
 				LOG.info("Added HBase Kerberos security token to credentials.");
-			} catch ( ClassNotFoundException
+			} catch (ClassNotFoundException
 					| NoSuchMethodException
 					| IllegalAccessException
 					| InvocationTargetException e) {
@@ -231,7 +227,7 @@ public final class Utils {
 	}
 
 	/**
-	 * Copied method from org.apache.hadoop.yarn.util.Apps
+	 * Copied method from org.apache.hadoop.yarn.util.Apps.
 	 * It was broken by YARN-1824 (2.4.0) and fixed for 2.4.1
 	 * by https://issues.apache.org/jira/browse/YARN-1931
 	 */
@@ -262,8 +258,8 @@ public final class Utils {
 	 */
 	public static Map<String, String> getEnvironmentVariables(String envPrefix, org.apache.flink.configuration.Configuration flinkConfiguration) {
 		Map<String, String> result  = new HashMap<>();
-		for(Map.Entry<String, String> entry: flinkConfiguration.toMap().entrySet()) {
-			if(entry.getKey().startsWith(envPrefix) && entry.getKey().length() > envPrefix.length()) {
+		for (Map.Entry<String, String> entry: flinkConfiguration.toMap().entrySet()) {
+			if (entry.getKey().startsWith(envPrefix) && entry.getKey().length() > envPrefix.length()) {
 				// remove prefix
 				String key = entry.getKey().substring(envPrefix.length());
 				result.put(key, entry.getValue());
@@ -347,7 +343,7 @@ public final class Utils {
 
 		//register keytab
 		LocalResource keytabResource = null;
-		if(remoteKeytabPath != null) {
+		if (remoteKeytabPath != null) {
 			log.info("Adding keytab {} to the AM container local resource bucket", remoteKeytabPath);
 			keytabResource = Records.newRecord(LocalResource.class);
 			Path keytabPath = new Path(remoteKeytabPath);
@@ -359,7 +355,7 @@ public final class Utils {
 		LocalResource yarnConfResource = null;
 		LocalResource krb5ConfResource = null;
 		boolean hasKrb5 = false;
-		if(remoteYarnConfPath != null && remoteKrb5Path != null) {
+		if (remoteYarnConfPath != null && remoteKrb5Path != null) {
 			log.info("TM:Adding remoteYarnConfPath {} to the container local resource bucket", remoteYarnConfPath);
 			yarnConfResource = Records.newRecord(LocalResource.class);
 			Path yarnConfPath = new Path(remoteYarnConfPath);
@@ -405,12 +401,12 @@ public final class Utils {
 		taskManagerLocalResources.put("flink-conf.yaml", flinkConf);
 
 		//To support Yarn Secure Integration Test Scenario
-		if(yarnConfResource != null && krb5ConfResource != null) {
+		if (yarnConfResource != null && krb5ConfResource != null) {
 			taskManagerLocalResources.put(YARN_SITE_FILE_NAME, yarnConfResource);
 			taskManagerLocalResources.put(KRB5_FILE_NAME, krb5ConfResource);
 		}
 
-		if(keytabResource != null) {
+		if (keytabResource != null) {
 			taskManagerLocalResources.put(KEYTAB_FILE_NAME, keytabResource);
 		}
 
@@ -450,7 +446,7 @@ public final class Utils {
 
 		containerEnv.put(YarnConfigKeys.ENV_HADOOP_USER_NAME, UserGroupInformation.getCurrentUser().getUserName());
 
-		if(remoteKeytabPath != null && remoteKeytabPrincipal != null) {
+		if (remoteKeytabPath != null && remoteKeytabPrincipal != null) {
 			containerEnv.put(YarnConfigKeys.KEYTAB_PATH, remoteKeytabPath);
 			containerEnv.put(YarnConfigKeys.KEYTAB_PRINCIPAL, remoteKeytabPrincipal);
 		}

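Utils.calculateHeapSize, touched in the hunks above, subtracts a configurable off-heap cutoff from the container memory: the cutoff is memory times the cutoff ratio, raised to the configured minimum if needed, and the remainder becomes the JVM heap. A self-contained sketch of that arithmetic, with illustrative ratio/minimum values standing in for the configuration lookups:

	public class HeapCutoffSketch {
		static int calculateHeapSize(int memoryMb, float cutoffRatio, int minCutoffMb) {
			int cutoff = (int) (memoryMb * cutoffRatio); // reserved for off-heap use
			if (cutoff < minCutoffMb) {
				cutoff = minCutoffMb;                    // enforce the configured floor
			}
			return memoryMb - cutoff;                    // what remains for the heap
		}

		public static void main(String[] ignored) {
			// 1024 MB container at a 0.25 ratio: 256 MB cutoff, 768 MB heap
			System.out.println(calculateHeapSize(1024, 0.25f, 200));
			// 600 MB container: the 200 MB floor beats 150 MB (600 * 0.25), 400 MB heap
			System.out.println(calculateHeapSize(600, 0.25f, 200));
		}
	}
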
http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/YarnApplicationMasterRunner.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnApplicationMasterRunner.java b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnApplicationMasterRunner.java
index 64417f6..a424740 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnApplicationMasterRunner.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnApplicationMasterRunner.java
@@ -18,10 +18,6 @@
 
 package org.apache.flink.yarn;
 
-import akka.actor.ActorRef;
-import akka.actor.ActorSystem;
-import akka.actor.Props;
-
 import org.apache.flink.configuration.ConfigConstants;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.configuration.GlobalConfiguration;
@@ -48,19 +44,17 @@ import org.apache.flink.runtime.util.SignalHandler;
 import org.apache.flink.runtime.webmonitor.WebMonitor;
 import org.apache.flink.yarn.cli.FlinkYarnSessionCli;
 
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Props;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import scala.Option;
-import scala.Some;
-import scala.concurrent.duration.FiniteDuration;
-
 import java.io.File;
 import java.util.Map;
 import java.util.concurrent.Callable;
@@ -69,41 +63,43 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
 
+import scala.Option;
+import scala.Some;
+import scala.concurrent.duration.FiniteDuration;
+
 import static org.apache.flink.yarn.Utils.require;
 
 /**
  * This class is the executable entry point for the YARN application master.
  * It starts actor system and the actors for {@link JobManager}
  * and {@link YarnFlinkResourceManager}.
- * 
- * The JobManager handles Flink job execution, while the YarnFlinkResourceManager handles container
+ *
+ * <p>The JobManager handles Flink job execution, while the YarnFlinkResourceManager handles container
  * allocation and failure detection.
  */
 public class YarnApplicationMasterRunner {
 
-	/** Logger */
 	protected static final Logger LOG = LoggerFactory.getLogger(YarnApplicationMasterRunner.class);
 
 	/** The maximum time that TaskManagers may be waiting to register at the JobManager,
-	 * before they quit */
+	 * before they quit. */
 	private static final FiniteDuration TASKMANAGER_REGISTRATION_TIMEOUT = new FiniteDuration(5, TimeUnit.MINUTES);
 
-	/** The process environment variables */
+	/** The process environment variables. */
 	private static final Map<String, String> ENV = System.getenv();
 
-	/** The exit code returned if the initialization of the application master failed */
+	/** The exit code returned if the initialization of the application master failed. */
 	private static final int INIT_ERROR_EXIT_CODE = 31;
 
-	/** The exit code returned if the process exits because a critical actor died */
+	/** The exit code returned if the process exits because a critical actor died. */
 	private static final int ACTOR_DIED_EXIT_CODE = 32;
 
-
 	// ------------------------------------------------------------------------
 	//  Program entry point
 	// ------------------------------------------------------------------------
 
 	/**
-	 * The entry point for the YARN application master. 
+	 * The entry point for the YARN application master.
 	 *
 	 * @param args The command line arguments.
 	 */
@@ -144,7 +140,7 @@ public class YarnApplicationMasterRunner {
 			LOG.info("remoteKeytabPrincipal obtained {}", remoteKeytabPrincipal);
 
 			String keytabPath = null;
-			if(remoteKeytabPath != null) {
+			if (remoteKeytabPath != null) {
 				File f = new File(currDir, Utils.KEYTAB_FILE_NAME);
 				keytabPath = f.getAbsolutePath();
 				LOG.debug("keytabPath: {}", keytabPath);
@@ -153,7 +149,7 @@ public class YarnApplicationMasterRunner {
 			UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
 
 			LOG.info("YARN daemon is running as: {} Yarn client user obtainer: {}",
-					currentUser.getShortUserName(), yarnClientUsername );
+					currentUser.getShortUserName(), yarnClientUsername);
 
 			// Flink configuration
 			final Map<String, String> dynamicProperties =
@@ -172,7 +168,7 @@ public class YarnApplicationMasterRunner {
 
 			//To support Yarn Secure Integration Test Scenario
 			File krb5Conf = new File(currDir, Utils.KRB5_FILE_NAME);
-			if(krb5Conf.exists() && krb5Conf.canRead()) {
+			if (krb5Conf.exists() && krb5Conf.canRead()) {
 				String krb5Path = krb5Conf.getAbsolutePath();
 				LOG.info("KRB5 Conf: {}", krb5Path);
 				hadoopConfiguration = new org.apache.hadoop.conf.Configuration();
@@ -181,7 +177,7 @@ public class YarnApplicationMasterRunner {
 			}
 
 			SecurityUtils.SecurityConfiguration sc;
-			if(hadoopConfiguration != null) {
+			if (hadoopConfiguration != null) {
 				sc = new SecurityUtils.SecurityConfiguration(flinkConfig, hadoopConfiguration);
 			} else {
 				sc = new SecurityUtils.SecurityConfiguration(flinkConfig);
@@ -298,7 +294,6 @@ public class YarnApplicationMasterRunner {
 				taskManagerParameters.taskManagerHeapSizeMB(),
 				taskManagerParameters.taskManagerDirectMemoryLimitMB());
 
-
 			// ----------------- (2) start the actor system -------------------
 
 			// try to start the actor system, JobManager and JobManager actor system
@@ -314,7 +309,6 @@ public class YarnApplicationMasterRunner {
 
 			LOG.info("Actor system bound to hostname {}.", akkaHostname);
 
-
 			// ---- (3) Generate the configuration for the TaskManagers
 
 			final Configuration taskManagerConfig = BootstrapTools.generateTaskManagerConfiguration(
@@ -326,7 +320,6 @@ public class YarnApplicationMasterRunner {
 				taskManagerParameters, taskManagerConfig,
 				currDir, getTaskManagerClass(), LOG);
 
-
 			// ---- (4) start the actors and components in this order:
 
 			// 1) JobManager & Archive (in non-HA case, the leader service takes this)
@@ -360,7 +353,6 @@ public class YarnApplicationMasterRunner {
 				getJobManagerClass(),
 				getArchivistClass())._1();
 
-
 			// 2: the web monitor
 			LOG.debug("Starting Web Frontend");
 
@@ -390,7 +382,7 @@ public class YarnApplicationMasterRunner {
 				webMonitorURL,
 				taskManagerParameters,
 				taskManagerContext,
-				numInitialTaskManagers, 
+				numInitialTaskManagers,
 				LOG);
 
 			ActorRef resourceMaster = actorSystem.actorOf(resourceMasterProps);
@@ -467,7 +459,6 @@ public class YarnApplicationMasterRunner {
 		return 0;
 	}
 
-
 	// ------------------------------------------------------------------------
 	//  For testing, this allows to override the actor classes used for
 	//  JobManager and the archive of completed jobs
@@ -494,10 +485,11 @@ public class YarnApplicationMasterRunner {
 	// ------------------------------------------------------------------------
 
 	/**
-	 * 
-	 * @param baseDirectory
-	 * @param additional
-	 * 
+	 * Reads the global configuration from the given directory and adds the given parameters to it.
+	 *
+	 * @param baseDirectory directory to load the configuration from
+	 * @param additional additional parameters to be included in the configuration
+	 *
 	 * @return The configuration to be used by the TaskManagers.
 	 */
 	@SuppressWarnings("deprecation")
@@ -522,7 +514,7 @@ public class YarnApplicationMasterRunner {
 			configuration.setInteger(ConfigConstants.JOB_MANAGER_WEB_PORT_KEY, 0);
 		}
 
-		// if the user has set the deprecated YARN-specific config keys, we add the 
+		// if the user has set the deprecated YARN-specific config keys, we add the
 		// corresponding generic config keys instead. that way, later code needs not
 		// deal with deprecated config keys
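The import moves at the top of this file follow Flink's strict checkstyle import order: org.apache.flink imports first, then other third-party packages, then javax and java, then scala, and finally static imports, with a blank line between groups. A minimal compliant import section, sketched only from the groups visible in these hunks:

	import org.apache.flink.configuration.Configuration;

	import akka.actor.ActorRef;
	import org.slf4j.Logger;

	import java.util.Map;

	import scala.Option;

	import static org.apache.flink.yarn.Utils.require;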
 

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterClient.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterClient.java b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterClient.java
index 7042f99..a435ef7 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterClient.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterClient.java
@@ -15,14 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.flink.yarn;
 
-import akka.actor.ActorRef;
+package org.apache.flink.yarn;
 
-import akka.actor.ActorSystem;
-import akka.actor.Props;
-import akka.pattern.Patterns;
-import akka.util.Timeout;
 import org.apache.flink.api.common.JobID;
 import org.apache.flink.api.common.JobSubmissionResult;
 import org.apache.flink.client.program.ClusterClient;
@@ -39,6 +34,12 @@ import org.apache.flink.runtime.jobgraph.JobGraph;
 import org.apache.flink.util.FlinkException;
 import org.apache.flink.util.Preconditions;
 import org.apache.flink.yarn.cli.FlinkYarnSessionCli;
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Props;
+import akka.pattern.Patterns;
+import akka.util.Timeout;
 import org.apache.hadoop.service.Service;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
@@ -47,10 +48,6 @@ import org.apache.hadoop.yarn.client.api.YarnClient;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import scala.Option;
-import scala.concurrent.Await;
-import scala.concurrent.Future;
-import scala.concurrent.duration.FiniteDuration;
 
 import java.io.File;
 import java.io.IOException;
@@ -59,6 +56,11 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicBoolean;
 
+import scala.Option;
+import scala.concurrent.Await;
+import scala.concurrent.Future;
+import scala.concurrent.duration.FiniteDuration;
+
 /**
  * Java representation of a running Flink cluster within YARN.
  */
@@ -84,7 +86,7 @@ public class YarnClusterClient extends ClusterClient {
 
 	private boolean isConnected = true;
 
-	/** Indicator whether this cluster has just been created */
+	/** Indicator whether this cluster has just been created. */
 	private final boolean newlyCreatedCluster;
 
 	/**
@@ -128,7 +130,7 @@ public class YarnClusterClient extends ClusterClient {
 	}
 
 	/**
-	 * Disconnect from the Yarn cluster
+	 * Disconnect from the Yarn cluster.
 	 */
 	public void disconnect() {
 
@@ -136,7 +138,7 @@ public class YarnClusterClient extends ClusterClient {
 			return;
 		}
 
-		if(!isConnected) {
+		if (!isConnected) {
 			throw new IllegalStateException("Can not disconnect from an unconnected cluster.");
 		}
 
@@ -151,7 +153,7 @@ public class YarnClusterClient extends ClusterClient {
 		try {
 			pollingRunner.stopRunner();
 			pollingRunner.join(1000);
-		} catch(InterruptedException e) {
+		} catch (InterruptedException e) {
 			LOG.warn("Shutdown of the polling runner was interrupted", e);
 			Thread.currentThread().interrupt();
 		}
@@ -159,7 +161,6 @@ public class YarnClusterClient extends ClusterClient {
 		isConnected = false;
 	}
 
-
 	// -------------------------- Interaction with the cluster ------------------------
 
 	/*
@@ -209,7 +210,7 @@ public class YarnClusterClient extends ClusterClient {
 	@Override
 	public String getWebInterfaceURL() {
 		// there seems to be a difference between HD 2.2.0 and 2.6.0
-		if(!trackingURL.startsWith("http://")) {
+		if (!trackingURL.startsWith("http://")) {
 			return "http://" + trackingURL;
 		} else {
 			return trackingURL;
@@ -226,10 +227,10 @@ public class YarnClusterClient extends ClusterClient {
 	 */
 	@Override
 	public GetClusterStatusResponse getClusterStatus() {
-		if(!isConnected) {
+		if (!isConnected) {
 			throw new IllegalStateException("The cluster is not connected to the cluster.");
 		}
-		if(hasBeenShutdown()) {
+		if (hasBeenShutdown()) {
 			throw new IllegalStateException("The cluster has already been shutdown.");
 		}
 
@@ -245,17 +246,17 @@ public class YarnClusterClient extends ClusterClient {
 	}
 
 	public ApplicationStatus getApplicationStatus() {
-		if(!isConnected) {
+		if (!isConnected) {
 			throw new IllegalStateException("The cluster has been connected to the ApplicationMaster.");
 		}
 		ApplicationReport lastReport = null;
-		if(pollingRunner == null) {
+		if (pollingRunner == null) {
 			LOG.warn("YarnClusterClient.getApplicationStatus() has been called on an uninitialized cluster." +
 					"The system might be in an erroneous state");
 		} else {
 			lastReport = pollingRunner.getLastReport();
 		}
-		if(lastReport == null) {
+		if (lastReport == null) {
 			LOG.warn("YarnClusterClient.getApplicationStatus() has been called on a cluster that didn't receive a status so far." +
 					"The system might be in an erroneous state");
 			return ApplicationStatus.UNKNOWN;
@@ -264,7 +265,7 @@ public class YarnClusterClient extends ClusterClient {
 			ApplicationStatus status =
 				(appState == YarnApplicationState.FAILED || appState == YarnApplicationState.KILLED) ?
 					ApplicationStatus.FAILED : ApplicationStatus.SUCCEEDED;
-			if(status != ApplicationStatus.SUCCEEDED) {
+			if (status != ApplicationStatus.SUCCEEDED) {
 				LOG.warn("YARN reported application state {}", appState);
 				LOG.warn("Diagnostics: {}", lastReport.getDiagnostics());
 			}
@@ -275,17 +276,17 @@ public class YarnClusterClient extends ClusterClient {
 	@Override
 	public List<String> getNewMessages() {
 
-		if(hasBeenShutdown()) {
+		if (hasBeenShutdown()) {
 			throw new RuntimeException("The YarnClusterClient has already been stopped");
 		}
 
-		if(!isConnected) {
+		if (!isConnected) {
 			throw new IllegalStateException("The cluster has been connected to the ApplicationMaster.");
 		}
 
 		List<String> ret = new ArrayList<String>();
 		// get messages from ApplicationClient (locally)
-		while(true) {
+		while (true) {
 			Object result;
 			try {
 				Future<Object> response =
@@ -294,23 +295,23 @@ public class YarnClusterClient extends ClusterClient {
 						YarnMessages.getLocalGetYarnMessage(),
 						new Timeout(akkaDuration));
 				result = Await.result(response, akkaDuration);
-			} catch(Exception ioe) {
+			} catch (Exception ioe) {
 				LOG.warn("Error retrieving the YARN messages locally", ioe);
 				break;
 			}
 
-			if(!(result instanceof Option)) {
+			if (!(result instanceof Option)) {
 				throw new RuntimeException("LocalGetYarnMessage requires a response of type " +
 						"Option. Instead the response is of type " + result.getClass() + ".");
 			} else {
 				Option messageOption = (Option) result;
 				LOG.debug("Received message option {}", messageOption);
-				if(messageOption.isEmpty()) {
+				if (messageOption.isEmpty()) {
 					break;
 				} else {
 					Object obj = messageOption.get();
 
-					if(obj instanceof InfoMessage) {
+					if (obj instanceof InfoMessage) {
 						InfoMessage msg = (InfoMessage) obj;
 						ret.add("[" + msg.date() + "] " + msg.message());
 					} else {
@@ -339,7 +340,7 @@ public class YarnClusterClient extends ClusterClient {
 	}
 
 	/**
-	 * Shuts down the Yarn application
+	 * Shuts down the Yarn application.
 	 */
 	public void shutdownCluster() {
 
@@ -365,7 +366,7 @@ public class YarnClusterClient extends ClusterClient {
 							"Flink YARN Client requested shutdown"),
 					new Timeout(akkaDuration));
 			Await.ready(response, akkaDuration);
-		} catch(Exception e) {
+		} catch (Exception e) {
 			LOG.warn("Error while stopping YARN cluster.", e);
 		}
 
@@ -385,7 +386,7 @@ public class YarnClusterClient extends ClusterClient {
 		try {
 			pollingRunner.stopRunner();
 			pollingRunner.join(1000);
-		} catch(InterruptedException e) {
+		} catch (InterruptedException e) {
 			LOG.warn("Shutdown of the polling runner was interrupted", e);
 			Thread.currentThread().interrupt();
 		}
@@ -420,7 +421,6 @@ public class YarnClusterClient extends ClusterClient {
 		return hasBeenShutDown.get();
 	}
 
-
 	private class ClientShutdownHook extends Thread {
 		@Override
 		public void run() {
@@ -446,14 +446,13 @@ public class YarnClusterClient extends ClusterClient {
 		private final Object lock = new Object();
 		private ApplicationReport lastReport;
 
-
 		public PollingThread(YarnClient yarnClient, ApplicationId appId) {
 			this.yarnClient = yarnClient;
 			this.appId = appId;
 		}
 
 		public void stopRunner() {
-			if(!running.get()) {
+			if (!running.get()) {
 				LOG.warn("Polling thread was already stopped");
 			}
 			running.set(false);
@@ -484,7 +483,7 @@ public class YarnClusterClient extends ClusterClient {
 					stopRunner();
 				}
 			}
-			if(running.get() && !yarnClient.isInState(Service.STATE.STARTED)) {
+			if (running.get() && !yarnClient.isInState(Service.STATE.STARTED)) {
 				// == if the polling thread is still running but the yarn client is stopped.
 				LOG.warn("YARN client is unexpected in state " + yarnClient.getServiceState());
 			}
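The getNewMessages() loop above uses Akka's ask pattern: Patterns.ask sends a message to the application client actor and returns a Scala Future that is then blocked on with Await.result. A minimal sketch of that round trip, assuming a caller-supplied actor, message, and timeout (the helper itself is hypothetical, not part of this class):

	import akka.actor.ActorRef;
	import akka.pattern.Patterns;
	import akka.util.Timeout;

	import scala.concurrent.Await;
	import scala.concurrent.Future;
	import scala.concurrent.duration.FiniteDuration;

	static Object askAndWait(ActorRef actor, Object message, FiniteDuration duration) throws Exception {
		// Send the message and obtain a future for the eventual reply.
		Future<Object> response = Patterns.ask(actor, message, new Timeout(duration));
		// Block until the reply arrives or the duration elapses.
		return Await.result(response, duration);
	}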

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterClientV2.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterClientV2.java b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterClientV2.java
index 33d5987..f58e6aa 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterClientV2.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterClientV2.java
@@ -15,6 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.flink.yarn;
 
 import org.apache.flink.api.common.JobSubmissionResult;
@@ -23,6 +24,7 @@ import org.apache.flink.client.program.ProgramInvocationException;
 import org.apache.flink.runtime.clusterframework.ApplicationStatus;
 import org.apache.flink.runtime.clusterframework.messages.GetClusterStatusResponse;
 import org.apache.flink.runtime.jobgraph.JobGraph;
+
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
@@ -37,7 +39,7 @@ import java.util.List;
 
 /**
  * Java representation of a running Flink job on YARN.
- * Since flip-6, a flink job will be run as a yarn job by default, each job has a jobmaster, 
+ * Since FLIP-6, a Flink job is run as a YARN job by default and each job has a JobMaster,
  * so this class is used as a client to communicate with YARN and start the job on YARN.
  */
 public class YarnClusterClientV2 extends ClusterClient {
@@ -95,7 +97,7 @@ public class YarnClusterClientV2 extends ClusterClient {
 			if (report.getYarnApplicationState().equals(YarnApplicationState.RUNNING)) {
 				appId = report.getApplicationId();
 				trackingURL = report.getTrackingUrl();
-				logAndSysout("Please refer to " + getWebInterfaceURL() 
+				logAndSysout("Please refer to " + getWebInterfaceURL()
 						+ " for the running status of job " +  jobGraph.getJobID().toString());
 				//TODO: not support attach mode now
 				return new JobSubmissionResult(jobGraph.getJobID());
@@ -112,7 +114,7 @@ public class YarnClusterClientV2 extends ClusterClient {
 	@Override
 	public String getWebInterfaceURL() {
 		// there seems to be a difference between HD 2.2.0 and 2.6.0
-		if(!trackingURL.startsWith("http://")) {
+		if (!trackingURL.startsWith("http://")) {
 			return "http://" + trackingURL;
 		} else {
 			return trackingURL;

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterDescriptor.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterDescriptor.java b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterDescriptor.java
index 5f745b2..db5206a 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterDescriptor.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterDescriptor.java
@@ -15,8 +15,8 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.flink.yarn;
 
+package org.apache.flink.yarn;
 
 /**
  * Default implementation of {@link AbstractYarnClusterDescriptor} which starts an {@link YarnApplicationMasterRunner}.

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterDescriptorV2.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterDescriptorV2.java b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterDescriptorV2.java
index e3bd944..b22b163 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterDescriptorV2.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnClusterDescriptorV2.java
@@ -15,13 +15,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.flink.yarn;
 
+package org.apache.flink.yarn;
 
 /**
  * Implementation of {@link org.apache.flink.yarn.AbstractYarnClusterDescriptor} which is used to start the new application master for a job under FLIP-6.
  * This implementation is currently tricky, since YarnClusterDescriptorV2 is related to YarnClusterClientV2, but AbstractYarnClusterDescriptor is related
 - * to YarnClusterClient. We should let YarnClusterDescriptorV2 implements ClusterDescriptor<YarnClusterClientV2>.
 + * to YarnClusterClient. We should let YarnClusterDescriptorV2 implement ClusterDescriptor&lt;YarnClusterClientV2&gt;.
  * However, in order to use the code in AbstractYarnClusterDescriptor for setting environments and so on, we make YarnClusterDescriptorV2 as now.
  */
 public class YarnClusterDescriptorV2 extends AbstractYarnClusterDescriptor {

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/YarnConfigKeys.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnConfigKeys.java b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnConfigKeys.java
index 7c9c7a7..03d94fe 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnConfigKeys.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnConfigKeys.java
@@ -27,9 +27,9 @@ public class YarnConfigKeys {
 	//  Environment variable names
 	// ------------------------------------------------------------------------
 
-	public final static String ENV_TM_MEMORY = "_CLIENT_TM_MEMORY";
-	public final static String ENV_TM_COUNT = "_CLIENT_TM_COUNT";
-	public final static String ENV_APP_ID = "_APP_ID";
+	public static final String ENV_TM_MEMORY = "_CLIENT_TM_MEMORY";
+	public static final String ENV_TM_COUNT = "_CLIENT_TM_COUNT";
+	public static final String ENV_APP_ID = "_APP_ID";
 	public static final String ENV_CLIENT_HOME_DIR = "_CLIENT_HOME_DIR";
 	public static final String ENV_CLIENT_SHIP_FILES = "_CLIENT_SHIP_FILES";
 	public static final String ENV_SLOTS = "_SLOTS";
@@ -38,12 +38,12 @@ public class YarnConfigKeys {
 
 	public static final String ENV_FLINK_CLASSPATH = "_FLINK_CLASSPATH";
 
-	public final static String FLINK_JAR_PATH = "_FLINK_JAR_PATH"; // the Flink jar resource location (in HDFS).
-	public final static String FLINK_YARN_FILES = "_FLINK_YARN_FILES"; // the root directory for all yarn application files
+	public static final String FLINK_JAR_PATH = "_FLINK_JAR_PATH"; // the Flink jar resource location (in HDFS).
+	public static final String FLINK_YARN_FILES = "_FLINK_YARN_FILES"; // the root directory for all yarn application files
 
-	public final static String KEYTAB_PATH = "_KEYTAB_PATH";
-	public final static String KEYTAB_PRINCIPAL = "_KEYTAB_PRINCIPAL";
-	public final static String ENV_HADOOP_USER_NAME = "HADOOP_USER_NAME";
+	public static final String KEYTAB_PATH = "_KEYTAB_PATH";
+	public static final String KEYTAB_PRINCIPAL = "_KEYTAB_PRINCIPAL";
+	public static final String ENV_HADOOP_USER_NAME = "HADOOP_USER_NAME";
 	public static final String ENV_ZOOKEEPER_NAMESPACE = "_ZOOKEEPER_NAMESPACE";
 
 	public static final String ENV_KRB5_PATH = "_KRB5_PATH";
@@ -51,7 +51,7 @@ public class YarnConfigKeys {
 
 	// ------------------------------------------------------------------------
 
-	/** Private constructor to prevent instantiation */
+	/** Private constructor to prevent instantiation. */
 	private YarnConfigKeys() {}
 
 }
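The constant declarations in this hunk are rewritten from "final static" to "static final": checkstyle's ModifierOrder check enforces the modifier order recommended by the Java Language Specification. For example, with a made-up key:

	public static final String ENV_EXAMPLE = "_EXAMPLE"; // compliant order
	// public final static String ENV_EXAMPLE = "_EXAMPLE"; // flagged by ModifierOrder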

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/YarnContainerInLaunch.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnContainerInLaunch.java b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnContainerInLaunch.java
index 370df26..9a98519 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnContainerInLaunch.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnContainerInLaunch.java
@@ -20,6 +20,7 @@ package org.apache.flink.yarn;
 
 import org.apache.flink.runtime.clusterframework.types.ResourceID;
 import org.apache.flink.runtime.clusterframework.types.ResourceIDRetrievable;
+
 import org.apache.hadoop.yarn.api.records.Container;
 
 import static java.util.Objects.requireNonNull;
@@ -34,7 +35,7 @@ public class YarnContainerInLaunch implements ResourceIDRetrievable {
 
 	private final long timestamp;
 
-	/** The resource id associated with this worker type */
+	/** The resource id associated with this worker type. */
 	private final ResourceID resourceID;
 
 	public YarnContainerInLaunch(Container container) {

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/YarnFlinkApplicationMasterRunner.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnFlinkApplicationMasterRunner.java b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnFlinkApplicationMasterRunner.java
index 3f4d4f6..2ad9065 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnFlinkApplicationMasterRunner.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnFlinkApplicationMasterRunner.java
@@ -18,7 +18,6 @@
 
 package org.apache.flink.yarn;
 
-import akka.actor.ActorSystem;
 import org.apache.flink.api.common.JobExecutionResult;
 import org.apache.flink.api.common.time.Time;
 import org.apache.flink.configuration.ConfigConstants;
@@ -46,9 +45,10 @@ import org.apache.flink.runtime.rpc.akka.AkkaRpcService;
 import org.apache.flink.runtime.util.EnvironmentInformation;
 import org.apache.flink.runtime.util.JvmShutdownSafeguard;
 import org.apache.flink.runtime.util.SignalHandler;
+
+import akka.actor.ActorSystem;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import scala.concurrent.duration.FiniteDuration;
 
 import javax.annotation.concurrent.GuardedBy;
 
@@ -57,32 +57,33 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.ObjectInputStream;
 
+import scala.concurrent.duration.FiniteDuration;
+
 /**
  * This class is the executable entry point for the YARN Application Master that
  * executes a single Flink job and then shuts the YARN application down.
- * 
+ *
  * <p>The lifetime of the YARN application is bound to that of the Flink job. Other
  * YARN Application Master implementations are for example the YARN session.
- * 
- * It starts actor system and the actors for {@link JobManagerRunner}
+ *
+ * <p>It starts the actor system and the actors for {@link JobManagerRunner}
  * and {@link YarnResourceManager}.
  *
- * The JobManagerRunner start a {@link org.apache.flink.runtime.jobmaster.JobMaster}
+ * <p>The JobManagerRunner starts a {@link org.apache.flink.runtime.jobmaster.JobMaster}.
  * The JobMaster handles Flink job execution, while the YarnResourceManager handles container
  * allocation and failure detection.
  */
 public class YarnFlinkApplicationMasterRunner extends AbstractYarnFlinkApplicationMasterRunner
 		implements OnCompletionActions, FatalErrorHandler {
 
-	/** Logger */
 	protected static final Logger LOG = LoggerFactory.getLogger(YarnFlinkApplicationMasterRunner.class);
 
-	/** The job graph file path */
+	/** The job graph file path. */
 	private static final String JOB_GRAPH_FILE_PATH = "flink.jobgraph.path";
 
 	// ------------------------------------------------------------------------
 
-	/** The lock to guard startup / shutdown / manipulation methods */
+	/** The lock to guard startup / shutdown / manipulation methods. */
 	private final Object lock = new Object();
 
 	@GuardedBy("lock")
@@ -144,7 +145,7 @@ public class YarnFlinkApplicationMasterRunner extends AbstractYarnFlinkApplicati
 					HighAvailabilityServicesUtils.AddressResolution.NO_ADDRESS_RESOLUTION);
 
 				heartbeatServices = HeartbeatServices.fromConfiguration(config);
-				
+
 				metricRegistry = new MetricRegistry(MetricRegistryConfiguration.fromConfiguration(config));
 
 				// ---- (2) init resource manager -------
@@ -310,7 +311,7 @@ public class YarnFlinkApplicationMasterRunner extends AbstractYarnFlinkApplicati
 	//----------------------------------------------------------------------------------------------
 
 	/**
-	 * Job completion notification triggered by JobManager
+	 * Job completion notification triggered by JobManager.
 	 */
 	@Override
 	public void jobFinished(JobExecutionResult result) {
@@ -318,7 +319,7 @@ public class YarnFlinkApplicationMasterRunner extends AbstractYarnFlinkApplicati
 	}
 
 	/**
-	 * Job completion notification triggered by JobManager
+	 * Job completion notification triggered by JobManager.
 	 */
 	@Override
 	public void jobFailed(Throwable cause) {
@@ -326,7 +327,7 @@ public class YarnFlinkApplicationMasterRunner extends AbstractYarnFlinkApplicati
 	}
 
 	/**
-	 * Job completion notification triggered by self
+	 * Job completion notification triggered by self.
 	 */
 	@Override
 	public void jobFinishedByOther() {
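The Javadoc edits in this file apply two rules seen throughout the series: the first sentence of a comment ends with a period, and every follow-on paragraph opens with a <p> tag, as the project's Javadoc checkstyle rules require. A minimal compliant comment, on a placeholder class:

	/**
	 * First sentence, terminated with a period.
	 *
	 * <p>Each subsequent paragraph opens with the HTML paragraph tag on the
	 * same line as its first word.
	 */
	public class Example {}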

http://git-wip-us.apache.org/repos/asf/flink/blob/77b0fb9f/flink-yarn/src/main/java/org/apache/flink/yarn/YarnFlinkResourceManager.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnFlinkResourceManager.java b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnFlinkResourceManager.java
index 3c85795..4626a7e 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/YarnFlinkResourceManager.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/YarnFlinkResourceManager.java
@@ -18,15 +18,12 @@
 
 package org.apache.flink.yarn;
 
-import akka.actor.ActorRef;
-import akka.actor.Props;
-
 import org.apache.flink.configuration.ConfigConstants;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.runtime.akka.AkkaUtils;
-import org.apache.flink.runtime.clusterframework.FlinkResourceManager;
 import org.apache.flink.runtime.clusterframework.ApplicationStatus;
 import org.apache.flink.runtime.clusterframework.ContaineredTaskManagerParameters;
+import org.apache.flink.runtime.clusterframework.FlinkResourceManager;
 import org.apache.flink.runtime.clusterframework.messages.StopCluster;
 import org.apache.flink.runtime.clusterframework.types.ResourceID;
 import org.apache.flink.runtime.leaderretrieval.LeaderRetrievalService;
@@ -34,6 +31,8 @@ import org.apache.flink.util.Preconditions;
 import org.apache.flink.yarn.messages.ContainersAllocated;
 import org.apache.flink.yarn.messages.ContainersComplete;
 
+import akka.actor.ActorRef;
+import akka.actor.Props;
 import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse;
 import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerId;
@@ -46,7 +45,6 @@ import org.apache.hadoop.yarn.client.api.AMRMClient;
 import org.apache.hadoop.yarn.client.api.NMClient;
 import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-
 import org.slf4j.Logger;
 
 import java.lang.reflect.Method;
@@ -66,57 +64,57 @@ import static java.util.Objects.requireNonNull;
  */
 public class YarnFlinkResourceManager extends FlinkResourceManager<RegisteredYarnWorkerNode> {
 
-	/** The heartbeat interval while the resource master is waiting for containers */
+	/** The heartbeat interval while the resource master is waiting for containers. */
 	private static final int FAST_YARN_HEARTBEAT_INTERVAL_MS = 500;
 
-	/** The default heartbeat interval during regular operation */
+	/** The default heartbeat interval during regular operation. */
 	private static final int DEFAULT_YARN_HEARTBEAT_INTERVAL_MS = 5000;
 
 	/** Environment variable name of the final container id used by the Flink ResourceManager.
 	 * Container ID generation may vary across Hadoop versions. */
-	final static String ENV_FLINK_CONTAINER_ID = "_FLINK_CONTAINER_ID";
+	static final String ENV_FLINK_CONTAINER_ID = "_FLINK_CONTAINER_ID";
 
-	/** The containers where a TaskManager is starting and we are waiting for it to register */
+	/** The containers where a TaskManager is starting and we are waiting for it to register. */
 	private final Map<ResourceID, YarnContainerInLaunch> containersInLaunch;
 
 	/** Containers we have released, where we are waiting for an acknowledgement that
-	 * they are released */
+	 * they are released. */
 	private final Map<ContainerId, Container> containersBeingReturned;
 
-	/** The YARN / Hadoop configuration object */
+	/** The YARN / Hadoop configuration object. */
 	private final YarnConfiguration yarnConfig;
 
-	/** The TaskManager container parameters (like container memory size) */
+	/** The TaskManager container parameters (like container memory size). */
 	private final ContaineredTaskManagerParameters taskManagerParameters;
 
-	/** Context information used to start a TaskManager Java process */
+	/** Context information used to start a TaskManager Java process. */
 	private final ContainerLaunchContext taskManagerLaunchContext;
 
-	/** Host name for the container running this process */
+	/** Host name for the container running this process. */
 	private final String applicationMasterHostName;
 
-	/** Web interface URL, may be null */
+	/** Web interface URL, may be null. */
 	private final String webInterfaceURL;
 
-	/** Default heartbeat interval between this actor and the YARN ResourceManager */
+	/** Default heartbeat interval between this actor and the YARN ResourceManager. */
 	private final int yarnHeartbeatIntervalMillis;
 
-	/** Number of failed TaskManager containers before stopping the application. -1 means infinite. */ 
+	/** Number of failed TaskManager containers before stopping the application. -1 means infinite. */
 	private final int maxFailedContainers;
 
-	/** Callback handler for the asynchronous resourceManagerClient */
+	/** Callback handler for the asynchronous resourceManagerClient. */
 	private YarnResourceManagerCallbackHandler resourceManagerCallbackHandler;
 
-	/** Client to communicate with the Resource Manager (YARN's master) */
+	/** Client to communicate with the Resource Manager (YARN's master). */
 	private AMRMClientAsync<AMRMClient.ContainerRequest> resourceManagerClient;
 
-	/** Client to communicate with the Node manager and launch TaskManager processes */
+	/** Client to communicate with the Node manager and launch TaskManager processes. */
 	private NMClient nodeManagerClient;
 
-	/** The number of containers requested, but not yet granted */
+	/** The number of containers requested, but not yet granted. */
 	private int numPendingContainerRequests;
 
-	/** The number of failed containers since the master became active */
+	/** The number of failed containers since the master became active. */
 	private int failedContainersSoFar;
 
 	/** A reference to the reflector to look up previous session containers. */
@@ -428,7 +426,7 @@ public class YarnFlinkResourceManager extends FlinkResourceManager<RegisteredYar
 	}
 
 	// ------------------------------------------------------------------------
-	//  Callbacks from the YARN Resource Manager 
+	//  Callbacks from the YARN Resource Manager
 	// ------------------------------------------------------------------------
 
 	private void containersAllocated(List<Container> containers) {
@@ -491,7 +489,7 @@ public class YarnFlinkResourceManager extends FlinkResourceManager<RegisteredYar
 	/**
 	 * Invoked when the ResourceManager informs of completed containers.
 	 * Called via an actor message by the callback from the ResourceManager client.
-	 * 
+	 *
 	 * @param containers The containers that have completed.
 	 */
 	private void containersComplete(List<ContainerStatus> containers) {
@@ -624,8 +622,8 @@ public class YarnFlinkResourceManager extends FlinkResourceManager<RegisteredYar
 		private Logger logger;
 		private Method method;
 
-		public RegisterApplicationMasterResponseReflector(Logger LOG) {
-			this.logger = LOG;
+		public RegisterApplicationMasterResponseReflector(Logger log) {
+			this.logger = log;
 
 			try {
 				method = RegisterApplicationMasterResponse.class
@@ -671,12 +669,12 @@ public class YarnFlinkResourceManager extends FlinkResourceManager<RegisteredYar
 
 	/**
 	 * Creates the props needed to instantiate this actor.
-	 * 
-	 * Rather than extracting and validating parameters in the constructor, this factory method takes
+	 *
+	 * <p>Rather than extracting and validating parameters in the constructor, this factory method takes
 	 * care of that. That way, errors occur synchronously, and are not swallowed simply in a
 	 * failed asynchronous attempt to start the actor.
-	 
-	 * @param actorClass 
+	 *
+	 * @param actorClass
 	 *             The actor class, to allow overriding this actor with subclasses for testing.
 	 * @param flinkConfig
 	 *             The Flink configuration object.
@@ -694,7 +692,7 @@ public class YarnFlinkResourceManager extends FlinkResourceManager<RegisteredYar
 	 *             The initial number of TaskManagers to allocate.
 	 * @param log
 	 *             The logger to log to.
-	 * 
+	 *
 	 * @return The Props object to instantiate the YarnFlinkResourceManager actor.
 	 */
 	public static Props createActorProps(Class<? extends YarnFlinkResourceManager> actorClass,
@@ -706,8 +704,8 @@ public class YarnFlinkResourceManager extends FlinkResourceManager<RegisteredYar
 			ContaineredTaskManagerParameters taskManagerParameters,
 			ContainerLaunchContext taskManagerLaunchContext,
 			int numInitialTaskManagers,
-			Logger log)
-	{
+			Logger log) {
+
 		final int yarnHeartbeatIntervalMS = flinkConfig.getInteger(
 			ConfigConstants.YARN_HEARTBEAT_DELAY_SECONDS, DEFAULT_YARN_HEARTBEAT_INTERVAL_MS / 1000) * 1000;
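The RegisterApplicationMasterResponseReflector above resolves a method on RegisterApplicationMasterResponse via reflection, so the class still loads on Hadoop versions where that method is missing. A minimal sketch of the pattern, with a hypothetical method name:

	import java.lang.reflect.Method;

	// Resolve the method once; treat absence as an older API level, not an error.
	static Method lookupOptionalMethod(Class<?> clazz) {
		try {
			return clazz.getMethod("someOptionalMethod"); // hypothetical name
		} catch (NoSuchMethodException e) {
			return null; // not available in this version
		}
	}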
 


[05/15] flink git commit: [FLINK-6709] [gelly] Activate strict checkstyle for flink-gellies

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexOutDegreeTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexOutDegreeTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexOutDegreeTest.java
index b72f0ef..1517f23 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexOutDegreeTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexOutDegreeTest.java
@@ -27,10 +27,14 @@ import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link VertexOutDegree}.
+ */
 public class VertexOutDegreeTest
 extends AsmTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeDegreePairTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeDegreePairTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeDegreePairTest.java
index c65ef2d..5f492e4 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeDegreePairTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeDegreePairTest.java
@@ -28,10 +28,14 @@ import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link EdgeDegreePair}.
+ */
 public class EdgeDegreePairTest
 extends AsmTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeSourceDegreeTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeSourceDegreeTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeSourceDegreeTest.java
index 2fd0b19..393220d 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeSourceDegreeTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeSourceDegreeTest.java
@@ -28,10 +28,14 @@ import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link EdgeSourceDegree}.
+ */
 public class EdgeSourceDegreeTest
 extends AsmTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeTargetDegreeTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeTargetDegreeTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeTargetDegreeTest.java
index 34aca35..782296a 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeTargetDegreeTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeTargetDegreeTest.java
@@ -28,10 +28,14 @@ import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link EdgeTargetDegree}.
+ */
 public class EdgeTargetDegreeTest
 extends AsmTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/VertexDegreeTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/VertexDegreeTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/VertexDegreeTest.java
index c157cc1..192782d 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/VertexDegreeTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/annotate/undirected/VertexDegreeTest.java
@@ -27,10 +27,14 @@ import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link VertexDegree}.
+ */
 public class VertexDegreeTest
 extends AsmTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/filter/undirected/MaximumDegreeTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/filter/undirected/MaximumDegreeTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/filter/undirected/MaximumDegreeTest.java
index f017750..f03d82c 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/filter/undirected/MaximumDegreeTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/degree/filter/undirected/MaximumDegreeTest.java
@@ -26,10 +26,14 @@ import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link MaximumDegree}.
+ */
 public class MaximumDegreeTest
 extends AsmTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/simple/directed/SimplifyTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/simple/directed/SimplifyTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/simple/directed/SimplifyTest.java
index 709317c..a3aad4b 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/simple/directed/SimplifyTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/simple/directed/SimplifyTest.java
@@ -24,12 +24,16 @@ import org.apache.flink.graph.Graph;
 import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Before;
 import org.junit.Test;
 
 import java.util.LinkedList;
 import java.util.List;
 
+/**
+ * Tests for {@link Simplify}.
+ */
 public class SimplifyTest {
 
 	protected Graph<IntValue, NullValue, NullValue> graph;

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/simple/undirected/SimplifyTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/simple/undirected/SimplifyTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/simple/undirected/SimplifyTest.java
index d589000..6ff4292 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/simple/undirected/SimplifyTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/simple/undirected/SimplifyTest.java
@@ -24,12 +24,16 @@ import org.apache.flink.graph.Graph;
 import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Before;
 import org.junit.Test;
 
 import java.util.LinkedList;
 import java.util.List;
 
+/**
+ * Tests for {@link Simplify}.
+ */
 public class SimplifyTest {
 
 	protected Graph<IntValue, NullValue, NullValue> graph;

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/TranslateTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/TranslateTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/TranslateTest.java
index 7d6e3ea..dacea26 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/TranslateTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/TranslateTest.java
@@ -27,6 +27,7 @@ import org.apache.flink.graph.asm.translate.translators.LongValueToStringValue;
 import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.StringValue;
+
 import org.junit.Before;
 import org.junit.Test;
 
@@ -35,6 +36,9 @@ import java.util.List;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for translation of {@link Graph} IDs and values.
+ */
 public class TranslateTest {
 
 	private Graph<LongValue, LongValue, LongValue> graph;
@@ -72,10 +76,10 @@ public class TranslateTest {
 		List<Vertex<LongValue, LongValue>> vertexList = new LinkedList<>();
 		List<Edge<LongValue, LongValue>> edgeList = new LinkedList<>();
 
-		for (long l = 0 ; l < count ; l++) {
+		for (long l = 0; l < count; l++) {
 			LongValue lv0 = new LongValue(l);
-			LongValue lv1 = new LongValue(l+1);
-			LongValue lv2 = new LongValue(l+2);
+			LongValue lv1 = new LongValue(l + 1);
+			LongValue lv2 = new LongValue(l + 2);
 			vertexList.add(new Vertex<>(lv0, lv1));
 			edgeList.add(new Edge<>(lv0, lv1, lv2));
 		}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/LongValueAddOffsetTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/LongValueAddOffsetTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/LongValueAddOffsetTest.java
index ad63209..823de3d 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/LongValueAddOffsetTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/LongValueAddOffsetTest.java
@@ -19,10 +19,14 @@
 package org.apache.flink.graph.asm.translate.translators;
 
 import org.apache.flink.types.LongValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link LongValueAddOffset}.
+ */
 public class LongValueAddOffsetTest {
 
 	@Test

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/LongValueToSignedIntValueTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/LongValueToSignedIntValueTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/LongValueToSignedIntValueTest.java
index f730adf..c1f1966 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/LongValueToSignedIntValueTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/LongValueToSignedIntValueTest.java
@@ -21,10 +21,14 @@ package org.apache.flink.graph.asm.translate.translators;
 import org.apache.flink.graph.asm.translate.TranslateFunction;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link LongValueToSignedIntValue}.
+ */
 public class LongValueToSignedIntValueTest {
 
 	private TranslateFunction<LongValue, IntValue> translator = new LongValueToSignedIntValue();
@@ -33,18 +37,18 @@ public class LongValueToSignedIntValueTest {
 
 	@Test
 	public void testTranslation() throws Exception {
-		assertEquals(new IntValue(Integer.MIN_VALUE), translator.translate(new LongValue((long)Integer.MIN_VALUE), reuse));
+		assertEquals(new IntValue(Integer.MIN_VALUE), translator.translate(new LongValue((long) Integer.MIN_VALUE), reuse));
 		assertEquals(new IntValue(0), translator.translate(new LongValue(0L), reuse));
-		assertEquals(new IntValue(Integer.MAX_VALUE), translator.translate(new LongValue((long)Integer.MAX_VALUE), reuse));
+		assertEquals(new IntValue(Integer.MAX_VALUE), translator.translate(new LongValue((long) Integer.MAX_VALUE), reuse));
 	}
 
-	@Test(expected=IllegalArgumentException.class)
+	@Test(expected = IllegalArgumentException.class)
 	public void testUpperOutOfRange() throws Exception {
-		translator.translate(new LongValue((long)Integer.MAX_VALUE + 1), reuse);
+		translator.translate(new LongValue((long) Integer.MAX_VALUE + 1), reuse);
 	}
 
-	@Test(expected=IllegalArgumentException.class)
+	@Test(expected = IllegalArgumentException.class)
 	public void testLowerOutOfRange() throws Exception {
-		translator.translate(new LongValue((long)Integer.MIN_VALUE - 1), reuse);
+		translator.translate(new LongValue((long) Integer.MIN_VALUE - 1), reuse);
 	}
 }
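The gelly test hunks repeat two fixes: a mandatory class-level Javadoc on every top-level type and whitespace around "=" in annotation member assignments. A minimal test that passes both checks; the class and scenario are illustrative only:

	import org.junit.Test;

	/**
	 * Tests for a hypothetical translator.
	 */
	public class ExampleTranslatorTest {

		@Test(expected = IllegalArgumentException.class) // spaces around '='
		public void testOutOfRange() throws Exception {
			throw new IllegalArgumentException("out of range");
		}
	}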

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/LongValueToStringValueTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/LongValueToStringValueTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/LongValueToStringValueTest.java
index 8980cd3..db72121 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/LongValueToStringValueTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/LongValueToStringValueTest.java
@@ -21,10 +21,14 @@ package org.apache.flink.graph.asm.translate.translators;
 import org.apache.flink.graph.asm.translate.TranslateFunction;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.StringValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link LongValueToStringValue}.
+ */
 public class LongValueToStringValueTest {
 
 	private TranslateFunction<LongValue, StringValue> translator = new LongValueToStringValue();

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/LongValueToUnsignedIntValueTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/LongValueToUnsignedIntValueTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/LongValueToUnsignedIntValueTest.java
index ca70162..1cd51ad 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/LongValueToUnsignedIntValueTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/LongValueToUnsignedIntValueTest.java
@@ -21,10 +21,14 @@ package org.apache.flink.graph.asm.translate.translators;
 import org.apache.flink.graph.asm.translate.TranslateFunction;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link LongValueToUnsignedIntValue}.
+ */
 public class LongValueToUnsignedIntValueTest {
 
 	private TranslateFunction<LongValue, IntValue> translator = new LongValueToUnsignedIntValue();
@@ -38,12 +42,12 @@ public class LongValueToUnsignedIntValueTest {
 		assertEquals(new IntValue(-1), translator.translate(new LongValue((1L << 32) - 1), reuse));
 	}
 
-	@Test(expected=IllegalArgumentException.class)
+	@Test(expected = IllegalArgumentException.class)
 	public void testUpperOutOfRange() throws Exception {
 		translator.translate(new LongValue(1L << 32), reuse);
 	}
 
-	@Test(expected=IllegalArgumentException.class)
+	@Test(expected = IllegalArgumentException.class)
 	public void testLowerOutOfRange() throws Exception {
 		translator.translate(new LongValue(-1), reuse);
 	}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/ToNullValueTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/ToNullValueTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/ToNullValueTest.java
index 6d8a8da..416c479 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/ToNullValueTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/asm/translate/translators/ToNullValueTest.java
@@ -24,10 +24,14 @@ import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
 import org.apache.flink.types.StringValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link ToNullValue}.
+ */
 public class ToNullValueTest {
 
 	@Test

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/bipartite/BipartiteEdgeTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/bipartite/BipartiteEdgeTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/bipartite/BipartiteEdgeTest.java
index ad0106b..b27ece5 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/bipartite/BipartiteEdgeTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/bipartite/BipartiteEdgeTest.java
@@ -18,11 +18,13 @@
 
 package org.apache.flink.graph.bipartite;
 
-import org.apache.flink.graph.bipartite.BipartiteEdge;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link BipartiteEdge}.
+ */
 public class BipartiteEdgeTest {
 
 	private static final int BOTTOM_ID = 0;

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/bipartite/BipartiteGraphTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/bipartite/BipartiteGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/bipartite/BipartiteGraphTest.java
index 366cf8e..0ee6e3a 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/bipartite/BipartiteGraphTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/bipartite/BipartiteGraphTest.java
@@ -24,6 +24,7 @@ import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.test.util.TestBaseUtils;
+
 import org.junit.Test;
 
 import java.util.Arrays;
@@ -31,6 +32,9 @@ import java.util.Arrays;
 import static org.apache.flink.graph.generator.TestUtils.compareGraph;
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link BipartiteGraph}.
+ */
 public class BipartiteGraphTest {
 
 	@Test

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/bipartite/ProjectionTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/bipartite/ProjectionTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/bipartite/ProjectionTest.java
index 3aafe64..6301045 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/bipartite/ProjectionTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/bipartite/ProjectionTest.java
@@ -19,10 +19,14 @@
 package org.apache.flink.graph.bipartite;
 
 import org.apache.flink.graph.Vertex;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link Projection}.
+ */
 public class ProjectionTest {
 
 	private static final int ID = 10;

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/AbstractGraphTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/AbstractGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/AbstractGraphTest.java
deleted file mode 100644
index 0f65e32..0000000
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/AbstractGraphTest.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.graph.generator;
-
-import org.apache.flink.api.java.ExecutionEnvironment;
-import org.junit.Before;
-
-public class AbstractGraphTest {
-
-	protected ExecutionEnvironment env;
-
-	@Before
-	public void setup() {
-		env = ExecutionEnvironment.createCollectionsEnvironment();
-	}
-}

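The deleted AbstractGraphTest is not gone: it reappears later in this commit as GraphGeneratorTestBase, renamed to the *TestBase convention, declared abstract, and given the class-level javadoc that the strict checkstyle requires.
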
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CirculantGraphTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CirculantGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CirculantGraphTest.java
index aae88ca..b7197a4 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CirculantGraphTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CirculantGraphTest.java
@@ -24,14 +24,16 @@ import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
-import org.junit.Rule;
+
 import org.junit.Test;
-import org.junit.rules.ExpectedException;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link CirculantGraph}.
+ */
 public class CirculantGraphTest
-extends AbstractGraphTest {
+extends GraphGeneratorTestBase {
 
 	@Test
 	public void testGraph()

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CompleteGraphTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CompleteGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CompleteGraphTest.java
index cb06da5..1791f2e 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CompleteGraphTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CompleteGraphTest.java
@@ -24,12 +24,16 @@ import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link CompleteGraph}.
+ */
 public class CompleteGraphTest
-extends AbstractGraphTest {
+extends GraphGeneratorTestBase {
 
 	@Test
 	public void testGraph()
@@ -54,7 +58,7 @@ extends AbstractGraphTest {
 			.generate();
 
 		assertEquals(vertexCount, graph.numberOfVertices());
-		assertEquals(vertexCount*(vertexCount-1), graph.numberOfEdges());
+		assertEquals(vertexCount * (vertexCount - 1), graph.numberOfEdges());
 
 		long minInDegree = graph.inDegrees().min(1).collect().get(0).f1.getValue();
 		long minOutDegree = graph.outDegrees().min(1).collect().get(0).f1.getValue();

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CycleGraphTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CycleGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CycleGraphTest.java
index be56f56..e4e2960 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CycleGraphTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/CycleGraphTest.java
@@ -24,12 +24,16 @@ import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link CycleGraph}.
+ */
 public class CycleGraphTest
-extends AbstractGraphTest {
+extends GraphGeneratorTestBase {
 
 	@Test
 	public void testGraph()

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/EchoGraphTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/EchoGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/EchoGraphTest.java
index 777b576..fc64d62 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/EchoGraphTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/EchoGraphTest.java
@@ -24,14 +24,18 @@ import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link EchoGraph}.
+ */
 public class EchoGraphTest
-extends AbstractGraphTest {
+extends GraphGeneratorTestBase {
 
 	@Rule
 	public ExpectedException thrown = ExpectedException.none();

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/EmptyGraphTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/EmptyGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/EmptyGraphTest.java
index c039607..939d871 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/EmptyGraphTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/EmptyGraphTest.java
@@ -24,12 +24,16 @@ import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link EmptyGraph}.
+ */
 public class EmptyGraphTest
-extends AbstractGraphTest {
+extends GraphGeneratorTestBase {
 
 	@Test
 	public void testGraph()

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/GraphGeneratorTestBase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/GraphGeneratorTestBase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/GraphGeneratorTestBase.java
new file mode 100644
index 0000000..85a9ef0
--- /dev/null
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/GraphGeneratorTestBase.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.graph.generator;
+
+import org.apache.flink.api.java.ExecutionEnvironment;
+
+import org.junit.Before;
+
+/**
+ * Base class for graph generator tests.
+ */
+public abstract class GraphGeneratorTestBase {
+
+	protected ExecutionEnvironment env;
+
+	@Before
+	public void setup() {
+		env = ExecutionEnvironment.createCollectionsEnvironment();
+	}
+}

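With the base class in place, a new generator test needs only the inherited env field. An illustrative sketch (the test class is hypothetical, and the CycleGraph constructor is assumed to take an environment plus a vertex count, matching its use elsewhere in Gelly):

// Illustrative only; not part of this commit.
package org.apache.flink.graph.generator;

import org.apache.flink.graph.Graph;
import org.apache.flink.types.LongValue;
import org.apache.flink.types.NullValue;

import org.junit.Test;

import static org.junit.Assert.assertEquals;

/**
 * Illustrative test extending {@link GraphGeneratorTestBase}.
 */
public class ExampleGeneratorTest extends GraphGeneratorTestBase {

	@Test
	public void testVertexCount() throws Exception {
		// env comes from the base class's @Before setup
		Graph<LongValue, NullValue, NullValue> graph = new CycleGraph(env, 10).generate();

		assertEquals(10, graph.numberOfVertices());
	}
}
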
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/GridGraphTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/GridGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/GridGraphTest.java
index 6a01a34..c40d456 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/GridGraphTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/GridGraphTest.java
@@ -24,12 +24,16 @@ import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link GridGraph}.
+ */
 public class GridGraphTest
-extends AbstractGraphTest {
+extends GraphGeneratorTestBase {
 
 	@Test
 	public void testGraph()
@@ -60,8 +64,8 @@ extends AbstractGraphTest {
 
 		// Each vertex is the source of one edge in the first dimension of size 2,
 		// and the source of two edges in each dimension of size greater than 2.
-		assertEquals(2*3*5*7, graph.numberOfVertices());
-		assertEquals(7 * 2*3*5*7, graph.numberOfEdges());
+		assertEquals(2 * 3 * 5 * 7, graph.numberOfVertices());
+		assertEquals(7 * 2 * 3 * 5 * 7, graph.numberOfEdges());
 
 		long minInDegree = graph.inDegrees().min(1).collect().get(0).f1.getValue();
 		long minOutDegree = graph.outDegrees().min(1).collect().get(0).f1.getValue();

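Spelling out the arithmetic behind the reformatted assertions: the grid has dimensions 2, 3, 5 and 7, hence 2 * 3 * 5 * 7 = 210 vertices. Per the comment in the hunk, each vertex sources one edge in the dimension of size 2 and two edges in each larger dimension, i.e. 1 + 2 + 2 + 2 = 7 edges per vertex, which is the factor of 7 in the asserted edge count of 7 * 210 = 1470.
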
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/HypercubeGraphTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/HypercubeGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/HypercubeGraphTest.java
index 49b0ba7..05f84cd 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/HypercubeGraphTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/HypercubeGraphTest.java
@@ -24,12 +24,16 @@ import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link HypercubeGraph}.
+ */
 public class HypercubeGraphTest
-extends AbstractGraphTest {
+extends GraphGeneratorTestBase {
 
 	@Test
 	public void testGraph()

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/PathGraphTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/PathGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/PathGraphTest.java
index b5c7cd3..19b0f25 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/PathGraphTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/PathGraphTest.java
@@ -24,12 +24,16 @@ import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link PathGraph}.
+ */
 public class PathGraphTest
-extends AbstractGraphTest {
+extends GraphGeneratorTestBase {
 
 	@Test
 	public void testGraph()

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/RMatGraphTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/RMatGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/RMatGraphTest.java
index 2cda69a..920fc4e 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/RMatGraphTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/RMatGraphTest.java
@@ -18,7 +18,6 @@
 
 package org.apache.flink.graph.generator;
 
-import org.apache.commons.math3.random.JDKRandomGenerator;
 import org.apache.flink.api.java.io.DiscardingOutputFormat;
 import org.apache.flink.graph.Edge;
 import org.apache.flink.graph.Graph;
@@ -27,13 +26,18 @@ import org.apache.flink.graph.generator.random.JDKRandomGeneratorFactory;
 import org.apache.flink.graph.generator.random.RandomGenerableFactory;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
+import org.apache.commons.math3.random.JDKRandomGenerator;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
+/**
+ * Tests for {@link RMatGraph}.
+ */
 public class RMatGraphTest
-extends AbstractGraphTest {
+extends GraphGeneratorTestBase {
 
 	@Test
 	public void testGraphMetrics()

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/SingletonEdgeGraphTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/SingletonEdgeGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/SingletonEdgeGraphTest.java
index 5e9a79a..045354d 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/SingletonEdgeGraphTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/SingletonEdgeGraphTest.java
@@ -24,12 +24,16 @@ import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link SingletonEdgeGraph}.
+ */
 public class SingletonEdgeGraphTest
-extends AbstractGraphTest {
+extends GraphGeneratorTestBase {
 
 	@Test
 	public void testGraph()

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/StarGraphTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/StarGraphTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/StarGraphTest.java
index 4ccb804..ee9df74 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/StarGraphTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/StarGraphTest.java
@@ -24,12 +24,16 @@ import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link StarGraph}.
+ */
 public class StarGraphTest
-extends AbstractGraphTest {
+extends GraphGeneratorTestBase {
 
 	@Test
 	public void testGraph()

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/TestUtils.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/TestUtils.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/TestUtils.java
index 23ad31c..fd099c0 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/TestUtils.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/generator/TestUtils.java
@@ -35,8 +35,13 @@ import java.util.List;
 
 import static org.junit.Assert.assertTrue;
 
+/**
+ * Utility methods for testing graph algorithms.
+ */
 public final class TestUtils {
 
+	private TestUtils() {}
+
 	/**
 	 * Compare graph vertices and edges against expected values.
 	 *

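The private no-argument constructor added here is the standard checkstyle remedy for utility classes (the HideUtilityClassConstructor rule): a final class holding only static members should not be instantiable. A generic sketch with hypothetical names:

/**
 * Utility methods (illustrative only).
 */
public final class ExampleUtils {

	// unreachable constructor satisfies the utility-class check
	private ExampleUtils() {}

	public static long square(long x) {
		return x * x;
	}
}
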
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/gsa/GSACompilerTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/gsa/GSACompilerTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/gsa/GSACompilerTest.java
index cd677b6..70a2d15 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/gsa/GSACompilerTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/gsa/GSACompilerTest.java
@@ -18,10 +18,6 @@
 
 package org.apache.flink.graph.gsa;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
 import org.apache.flink.api.common.Plan;
 import org.apache.flink.api.common.functions.MapFunction;
 import org.apache.flink.api.common.operators.util.FieldList;
@@ -43,75 +39,75 @@ import org.apache.flink.optimizer.plan.WorksetIterationPlanNode;
 import org.apache.flink.optimizer.util.CompilerTestBase;
 import org.apache.flink.runtime.operators.shipping.ShipStrategyType;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Validate compiled {@link GatherSumApplyIteration} programs.
+ */
 public class GSACompilerTest extends CompilerTestBase {
 
 	private static final long serialVersionUID = 1L;
 
 	@Test
 	public void testGSACompiler() {
-		try {
-			ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
-			env.setParallelism(DEFAULT_PARALLELISM);
-			// compose test program
-			{
-				DataSet<Edge<Long, NullValue>> edges = env.fromElements(new Tuple3<>(
-						1L, 2L, NullValue.getInstance())).map(new Tuple3ToEdgeMap<Long, NullValue>());
-
-				Graph<Long, Long, NullValue> graph = Graph.fromDataSet(edges, new InitVertices(), env);
-
-				DataSet<Vertex<Long, Long>> result = graph.runGatherSumApplyIteration(
-						new GatherNeighborIds(), new SelectMinId(),
-						new UpdateComponentId(), 100).getVertices();
-				
-				result.output(new DiscardingOutputFormat<Vertex<Long, Long>>());
-			}
-			
-			Plan p = env.createProgramPlan("GSA Connected Components");
-			OptimizedPlan op = compileNoStats(p);
-			
-			// check the sink
-			SinkPlanNode sink = op.getDataSinks().iterator().next();
-			assertEquals(ShipStrategyType.FORWARD, sink.getInput().getShipStrategy());
-			assertEquals(DEFAULT_PARALLELISM, sink.getParallelism());
-			assertEquals(PartitioningProperty.HASH_PARTITIONED, sink.getGlobalProperties().getPartitioning());
-			
-			// check the iteration
-			WorksetIterationPlanNode iteration = (WorksetIterationPlanNode) sink.getInput().getSource();
-			assertEquals(DEFAULT_PARALLELISM, iteration.getParallelism());
-			
-			// check the solution set join and the delta
-			PlanNode ssDelta = iteration.getSolutionSetDeltaPlanNode();
-			assertTrue(ssDelta instanceof DualInputPlanNode); // this is only true if the update function preserves the partitioning
-			
-			DualInputPlanNode ssJoin = (DualInputPlanNode) ssDelta;
-			assertEquals(DEFAULT_PARALLELISM, ssJoin.getParallelism());
-			assertEquals(ShipStrategyType.PARTITION_HASH, ssJoin.getInput1().getShipStrategy());
-			assertEquals(new FieldList(0), ssJoin.getInput1().getShipStrategyKeys());
-			
-			// check the workset set join
-			SingleInputPlanNode sumReducer = (SingleInputPlanNode) ssJoin.getInput1().getSource();
-			SingleInputPlanNode gatherMapper = (SingleInputPlanNode) sumReducer.getInput().getSource();
-			DualInputPlanNode edgeJoin = (DualInputPlanNode) gatherMapper.getInput().getSource(); 
-			assertEquals(DEFAULT_PARALLELISM, edgeJoin.getParallelism());
-			// input1 is the workset
-			assertEquals(ShipStrategyType.FORWARD, edgeJoin.getInput1().getShipStrategy());
-			// input2 is the edges
-			assertEquals(ShipStrategyType.PARTITION_HASH, edgeJoin.getInput2().getShipStrategy());
-			assertTrue(edgeJoin.getInput2().getTempMode().isCached());
-
-			assertEquals(new FieldList(0), edgeJoin.getInput2().getShipStrategyKeys());
-		}
-		catch (Exception e) {
-			System.err.println(e.getMessage());
-			e.printStackTrace();
-			fail(e.getMessage());
-		}
+		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
+		env.setParallelism(DEFAULT_PARALLELISM);
+
+		// compose test program
+
+		DataSet<Edge<Long, NullValue>> edges = env.fromElements(new Tuple3<>(
+			1L, 2L, NullValue.getInstance())).map(new Tuple3ToEdgeMap<Long, NullValue>());
+
+		Graph<Long, Long, NullValue> graph = Graph.fromDataSet(edges, new InitVertices(), env);
+
+		DataSet<Vertex<Long, Long>> result = graph.runGatherSumApplyIteration(
+			new GatherNeighborIds(), new SelectMinId(),
+			new UpdateComponentId(), 100).getVertices();
+
+		result.output(new DiscardingOutputFormat<Vertex<Long, Long>>());
+
+		Plan p = env.createProgramPlan("GSA Connected Components");
+		OptimizedPlan op = compileNoStats(p);
+
+		// check the sink
+		SinkPlanNode sink = op.getDataSinks().iterator().next();
+		assertEquals(ShipStrategyType.FORWARD, sink.getInput().getShipStrategy());
+		assertEquals(DEFAULT_PARALLELISM, sink.getParallelism());
+		assertEquals(PartitioningProperty.HASH_PARTITIONED, sink.getGlobalProperties().getPartitioning());
+
+		// check the iteration
+		WorksetIterationPlanNode iteration = (WorksetIterationPlanNode) sink.getInput().getSource();
+		assertEquals(DEFAULT_PARALLELISM, iteration.getParallelism());
+
+		// check the solution set join and the delta
+		PlanNode ssDelta = iteration.getSolutionSetDeltaPlanNode();
+		assertTrue(ssDelta instanceof DualInputPlanNode); // this is only true if the update function preserves the partitioning
+
+		DualInputPlanNode ssJoin = (DualInputPlanNode) ssDelta;
+		assertEquals(DEFAULT_PARALLELISM, ssJoin.getParallelism());
+		assertEquals(ShipStrategyType.PARTITION_HASH, ssJoin.getInput1().getShipStrategy());
+		assertEquals(new FieldList(0), ssJoin.getInput1().getShipStrategyKeys());
+
+		// check the workset join
+		SingleInputPlanNode sumReducer = (SingleInputPlanNode) ssJoin.getInput1().getSource();
+		SingleInputPlanNode gatherMapper = (SingleInputPlanNode) sumReducer.getInput().getSource();
+		DualInputPlanNode edgeJoin = (DualInputPlanNode) gatherMapper.getInput().getSource();
+		assertEquals(DEFAULT_PARALLELISM, edgeJoin.getParallelism());
+		// input1 is the workset
+		assertEquals(ShipStrategyType.FORWARD, edgeJoin.getInput1().getShipStrategy());
+		// input2 is the edges
+		assertEquals(ShipStrategyType.PARTITION_HASH, edgeJoin.getInput2().getShipStrategy());
+		assertTrue(edgeJoin.getInput2().getTempMode().isCached());
+
+		assertEquals(new FieldList(0), edgeJoin.getInput2().getShipStrategyKeys());
 	}
 
 	@SuppressWarnings("serial")
-	private static final class InitVertices	implements MapFunction<Long, Long> {
+	private static final class InitVertices implements MapFunction<Long, Long> {
 
 		public Long map(Long vertexId) {
 			return vertexId;

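Beyond the whitespace fixes, this hunk removes the try/catch wrapper that turned any exception into fail(e.getMessage()), which discards the stack trace from the test report. The same cleanup appears in the next file. A sketch of the idiom, assuming JUnit 4 (import org.junit.Test; import static org.junit.Assert.fail;) and a hypothetical runProgram() helper:

// Before: only e.getMessage() reaches the test report; the trace is lost.
@Test
public void testProgramOld() {
	try {
		runProgram();
	} catch (Exception e) {
		fail(e.getMessage());
	}
}

// After: the exception propagates and JUnit prints the full stack trace.
@Test
public void testProgram() throws Exception {
	runProgram();
}

// Hypothetical stand-in for the body of the real test.
private void runProgram() throws Exception {
}
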
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/gsa/GSATranslationTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/gsa/GSATranslationTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/gsa/GSATranslationTest.java
index 2deebcb..0dfbcb7 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/gsa/GSATranslationTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/gsa/GSATranslationTest.java
@@ -18,11 +18,6 @@
 
 package org.apache.flink.graph.gsa;
 
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
 import org.apache.flink.api.common.aggregators.LongSumAggregator;
 import org.apache.flink.api.common.functions.MapFunction;
 import org.apache.flink.api.java.DataSet;
@@ -38,98 +33,95 @@ import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.graph.utils.Tuple3ToEdgeMap;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Test the creation of a {@link GatherSumApplyIteration} program.
+ */
 public class GSATranslationTest {
 
+	private static final String ITERATION_NAME = "Test Name";
+
+	private static final String AGGREGATOR_NAME = "AggregatorName";
+
+	private static final String BC_SET_GATHER_NAME = "gather messages";
+
+	private static final String BC_SET_SUM_NAME = "sum updates";
+
+	private static final String BC_SET_APPLY_NAME = "apply updates";
+
+	private static final int NUM_ITERATIONS = 13;
+
+	private static final int ITERATION_parallelism = 77;
+
 	@Test
 	public void testTranslation() {
-		try {
-			final String ITERATION_NAME = "Test Name";
-			
-			final String AGGREGATOR_NAME = "AggregatorName";
-			
-			final String BC_SET_GATHER_NAME = "gather messages";
-			
-			final String BC_SET_SUM_NAME = "sum updates";
-
-			final String BC_SET_APLLY_NAME = "apply updates";
-
-			final int NUM_ITERATIONS = 13;
-			
-			final int ITERATION_parallelism = 77;
-			
-			
-			ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
-			
-			DataSet<Long> bcGather = env.fromElements(1L);
-			DataSet<Long> bcSum = env.fromElements(1L);
-			DataSet<Long> bcApply = env.fromElements(1L);
-
-			DataSet<Vertex<Long, Long>> result;
-
-			// ------------ construct the test program ------------------
-			{
-
-				DataSet<Edge<Long, NullValue>> edges = env.fromElements(new Tuple3<>(
-						1L, 2L, NullValue.getInstance())).map(new Tuple3ToEdgeMap<Long, NullValue>());
-
-				Graph<Long, Long, NullValue> graph = Graph.fromDataSet(edges, new InitVertices(), env);
-
-				GSAConfiguration parameters = new GSAConfiguration();
-
-				parameters.registerAggregator(AGGREGATOR_NAME, new LongSumAggregator());
-				parameters.setName(ITERATION_NAME);
-				parameters.setParallelism(ITERATION_parallelism);
-				parameters.addBroadcastSetForGatherFunction(BC_SET_GATHER_NAME, bcGather);
-				parameters.addBroadcastSetForSumFunction(BC_SET_SUM_NAME, bcSum);
-				parameters.addBroadcastSetForApplyFunction(BC_SET_APLLY_NAME, bcApply);
-
-				result = graph.runGatherSumApplyIteration(
-						new GatherNeighborIds(), new SelectMinId(),
-						new UpdateComponentId(), NUM_ITERATIONS, parameters).getVertices();
-				
-				result.output(new DiscardingOutputFormat<Vertex<Long, Long>>());
-			}
-			
-			
-			// ------------- validate the java program ----------------
-			
-			assertTrue(result instanceof DeltaIterationResultSet);
-			
-			DeltaIterationResultSet<?, ?> resultSet = (DeltaIterationResultSet<?, ?>) result;
-			DeltaIteration<?, ?> iteration = resultSet.getIterationHead();
-			
-			// check the basic iteration properties
-			assertEquals(NUM_ITERATIONS, resultSet.getMaxIterations());
-			assertArrayEquals(new int[] {0}, resultSet.getKeyPositions());
-			assertEquals(ITERATION_parallelism, iteration.getParallelism());
-			assertEquals(ITERATION_NAME, iteration.getName());
-			
-			assertEquals(AGGREGATOR_NAME, iteration.getAggregators().getAllRegisteredAggregators().iterator().next().getName());
-			
-			// validate that the semantic properties are set as they should
-			TwoInputUdfOperator<?, ?, ?, ?> solutionSetJoin = (TwoInputUdfOperator<?, ?, ?, ?>) resultSet.getNextWorkset();
-			assertTrue(solutionSetJoin.getSemanticProperties().getForwardingTargetFields(0, 0).contains(0));
-			assertTrue(solutionSetJoin.getSemanticProperties().getForwardingTargetFields(1, 0).contains(0));
-
-			SingleInputUdfOperator<?, ?, ?> sumReduce = (SingleInputUdfOperator<?, ?, ?>) solutionSetJoin.getInput1();
-			SingleInputUdfOperator<?, ?, ?> gatherMap = (SingleInputUdfOperator<?, ?, ?>) sumReduce.getInput();
-
-			// validate that the broadcast sets are forwarded
-			assertEquals(bcGather, gatherMap.getBroadcastSets().get(BC_SET_GATHER_NAME));
-			assertEquals(bcSum, sumReduce.getBroadcastSets().get(BC_SET_SUM_NAME));
-			assertEquals(bcApply, solutionSetJoin.getBroadcastSets().get(BC_SET_APLLY_NAME));
-		}
-		catch (Exception e) {
-			System.err.println(e.getMessage());
-			e.printStackTrace();
-			fail(e.getMessage());
-		}
+		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
+
+		DataSet<Long> bcGather = env.fromElements(1L);
+		DataSet<Long> bcSum = env.fromElements(1L);
+		DataSet<Long> bcApply = env.fromElements(1L);
+
+		DataSet<Vertex<Long, Long>> result;
+
+		// ------------ construct the test program ------------------
+
+		DataSet<Edge<Long, NullValue>> edges = env.fromElements(new Tuple3<>(
+			1L, 2L, NullValue.getInstance())).map(new Tuple3ToEdgeMap<Long, NullValue>());
+
+		Graph<Long, Long, NullValue> graph = Graph.fromDataSet(edges, new InitVertices(), env);
+
+		GSAConfiguration parameters = new GSAConfiguration();
+
+		parameters.registerAggregator(AGGREGATOR_NAME, new LongSumAggregator());
+		parameters.setName(ITERATION_NAME);
+		parameters.setParallelism(ITERATION_parallelism);
+		parameters.addBroadcastSetForGatherFunction(BC_SET_GATHER_NAME, bcGather);
+		parameters.addBroadcastSetForSumFunction(BC_SET_SUM_NAME, bcSum);
+		parameters.addBroadcastSetForApplyFunction(BC_SET_APPLY_NAME, bcApply);
+
+		result = graph.runGatherSumApplyIteration(
+			new GatherNeighborIds(), new SelectMinId(),
+			new UpdateComponentId(), NUM_ITERATIONS, parameters).getVertices();
+
+		result.output(new DiscardingOutputFormat<Vertex<Long, Long>>());
+
+		// ------------- validate the java program ----------------
+
+		assertTrue(result instanceof DeltaIterationResultSet);
+
+		DeltaIterationResultSet<?, ?> resultSet = (DeltaIterationResultSet<?, ?>) result;
+		DeltaIteration<?, ?> iteration = resultSet.getIterationHead();
+
+		// check the basic iteration properties
+		assertEquals(NUM_ITERATIONS, resultSet.getMaxIterations());
+		assertArrayEquals(new int[]{0}, resultSet.getKeyPositions());
+		assertEquals(ITERATION_parallelism, iteration.getParallelism());
+		assertEquals(ITERATION_NAME, iteration.getName());
+
+		assertEquals(AGGREGATOR_NAME, iteration.getAggregators().getAllRegisteredAggregators().iterator().next().getName());
+
+		// validate that the semantic properties are set as they should
+		TwoInputUdfOperator<?, ?, ?, ?> solutionSetJoin = (TwoInputUdfOperator<?, ?, ?, ?>) resultSet.getNextWorkset();
+		assertTrue(solutionSetJoin.getSemanticProperties().getForwardingTargetFields(0, 0).contains(0));
+		assertTrue(solutionSetJoin.getSemanticProperties().getForwardingTargetFields(1, 0).contains(0));
+
+		SingleInputUdfOperator<?, ?, ?> sumReduce = (SingleInputUdfOperator<?, ?, ?>) solutionSetJoin.getInput1();
+		SingleInputUdfOperator<?, ?, ?> gatherMap = (SingleInputUdfOperator<?, ?, ?>) sumReduce.getInput();
+
+		// validate that the broadcast sets are forwarded
+		assertEquals(bcGather, gatherMap.getBroadcastSets().get(BC_SET_GATHER_NAME));
+		assertEquals(bcSum, sumReduce.getBroadcastSets().get(BC_SET_SUM_NAME));
+		assertEquals(bcApply, solutionSetJoin.getBroadcastSets().get(BC_SET_APPLY_NAME));
 	}
 
 	@SuppressWarnings("serial")
-	private static final class InitVertices	implements MapFunction<Long, Long> {
+	private static final class InitVertices implements MapFunction<Long, Long> {
 
 		public Long map(Long vertexId) {
 			return vertexId;

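This file gets the same de-boilerplating, and additionally hoists the test's local final Strings and ints into private static final fields, so the remaining method body is just program construction followed by validation.
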
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/ConnectedComponentsWithRandomisedEdgesITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/ConnectedComponentsWithRandomisedEdgesITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/ConnectedComponentsWithRandomisedEdgesITCase.java
index b602fe6..3e76005 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/ConnectedComponentsWithRandomisedEdgesITCase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/ConnectedComponentsWithRandomisedEdgesITCase.java
@@ -31,6 +31,9 @@ import org.apache.flink.types.NullValue;
 
 import java.io.BufferedReader;
 
+/**
+ * Test {@link ConnectedComponents} with a randomly generated graph.
+ */
 @SuppressWarnings("serial")
 public class ConnectedComponentsWithRandomisedEdgesITCase extends JavaProgramTestBase {
 
@@ -67,7 +70,7 @@ public class ConnectedComponentsWithRandomisedEdgesITCase extends JavaProgramTes
 
 	/**
 	 * A map function that takes a Long value and creates a 2-tuple out of it:
-	 * <pre>(Long value) -> (value, value)</pre>
+	 * <pre>(Long value) -> (value, value)</pre>.
 	 */
 	public static final class IdAssigner implements MapFunction<Long, Vertex<Long, Long>> {
 		@Override
@@ -83,7 +86,7 @@ public class ConnectedComponentsWithRandomisedEdgesITCase extends JavaProgramTes
 		}
 	}
 
-	public static final class EdgeParser extends RichMapFunction<String, Edge<Long, NullValue>> {
+	private static final class EdgeParser extends RichMapFunction<String, Edge<Long, NullValue>> {
 		public Edge<Long, NullValue> map(String value) {
 			String[] nums = value.split(" ");
 			return new Edge<>(Long.parseLong(nums[0]), Long.parseLong(nums[1]),

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/AverageClusteringCoefficientTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/AverageClusteringCoefficientTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/AverageClusteringCoefficientTest.java
index db0a8a1..6474199 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/AverageClusteringCoefficientTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/AverageClusteringCoefficientTest.java
@@ -23,10 +23,14 @@ import org.apache.flink.graph.library.clustering.directed.AverageClusteringCoeff
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link AverageClusteringCoefficient}.
+ */
 public class AverageClusteringCoefficientTest
 extends AsmTestBase {
 
@@ -34,7 +38,7 @@ extends AsmTestBase {
 	public void testWithSimpleGraph()
 			throws Exception {
 		// see results in LocalClusteringCoefficientTest.testSimpleGraph
-		Result expectedResult = new Result(6, 1.0/2 + 2.0/6 + 2.0/6 + 1.0/12);
+		Result expectedResult = new Result(6, 1.0 / 2 + 2.0 / 6 + 2.0 / 6 + 1.0 / 12);
 
 		Result averageClusteringCoefficient = new AverageClusteringCoefficient<IntValue, NullValue, NullValue>()
 			.run(directedSimpleGraph)

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/GlobalClusteringCoefficientTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/GlobalClusteringCoefficientTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/GlobalClusteringCoefficientTest.java
index 50006fc..d74948a 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/GlobalClusteringCoefficientTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/GlobalClusteringCoefficientTest.java
@@ -18,16 +18,20 @@
 
 package org.apache.flink.graph.library.clustering.directed;
 
-import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.apache.flink.graph.asm.AsmTestBase;
 import org.apache.flink.graph.library.clustering.directed.GlobalClusteringCoefficient.Result;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
+import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link GlobalClusteringCoefficient}.
+ */
 public class GlobalClusteringCoefficientTest
 extends AsmTestBase {
 
@@ -47,7 +51,7 @@ extends AsmTestBase {
 	public void testWithCompleteGraph()
 			throws Exception {
 		long expectedDegree = completeGraphVertexCount - 1;
-		long expectedCount = completeGraphVertexCount * CombinatoricsUtils.binomialCoefficient((int)expectedDegree, 2);
+		long expectedCount = completeGraphVertexCount * CombinatoricsUtils.binomialCoefficient((int) expectedDegree, 2);
 
 		Result expectedResult = new Result(expectedCount, expectedCount);
 

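The cast fixed above sits in a formula that recurs in the triangle tests below: in a complete graph on n vertices, every vertex has degree n - 1 and all pairs of its neighbors are connected, so each vertex closes binomialCoefficient(n - 1, 2) triplets and the graph has n * C(n - 1, 2) closed triplets overall. As a worked check with an illustrative n = 4 (not the test's completeGraphVertexCount): each vertex closes C(3, 2) = 3 triplets, 4 * 3 = 12 in total, and because every triangle is counted once at each of its three vertices, the TriadicCensus and TriangleListing tests divide by 3, giving 12 / 3 = 4 = C(4, 3) triangles.
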
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/LocalClusteringCoefficientTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/LocalClusteringCoefficientTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/LocalClusteringCoefficientTest.java
index f1dd57b..a0aba65 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/LocalClusteringCoefficientTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/LocalClusteringCoefficientTest.java
@@ -18,7 +18,6 @@
 
 package org.apache.flink.graph.library.clustering.directed;
 
-import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.graph.asm.AsmTestBase;
 import org.apache.flink.graph.asm.dataset.ChecksumHashCode.Checksum;
@@ -27,12 +26,17 @@ import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
+import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.junit.Test;
 
 import java.util.List;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link LocalClusteringCoefficient}.
+ */
 public class LocalClusteringCoefficientTest
 extends AsmTestBase {
 
@@ -57,7 +61,7 @@ extends AsmTestBase {
 	public void testCompleteGraph()
 			throws Exception {
 		long expectedDegree = completeGraphVertexCount - 1;
-		long expectedTriangleCount = 2 * CombinatoricsUtils.binomialCoefficient((int)expectedDegree, 2);
+		long expectedTriangleCount = 2 * CombinatoricsUtils.binomialCoefficient((int) expectedDegree, 2);
 
 		DataSet<Result<LongValue>> cc = completeGraph
 			.run(new LocalClusteringCoefficient<LongValue, NullValue, NullValue>());

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/TriadicCensusTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/TriadicCensusTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/TriadicCensusTest.java
index a4d1560..68d5d9d 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/TriadicCensusTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/TriadicCensusTest.java
@@ -18,16 +18,20 @@
 
 package org.apache.flink.graph.library.clustering.directed;
 
-import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.apache.flink.graph.asm.AsmTestBase;
 import org.apache.flink.graph.library.clustering.directed.TriadicCensus.Result;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
+import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link TriadicCensus}.
+ */
 public class TriadicCensusTest
 extends AsmTestBase {
 
@@ -59,7 +63,7 @@ extends AsmTestBase {
 	public void testWithCompleteGraph()
 			throws Exception {
 		long expectedDegree = completeGraphVertexCount - 1;
-		long expectedCount = completeGraphVertexCount * CombinatoricsUtils.binomialCoefficient((int)expectedDegree, 2) / 3;
+		long expectedCount = completeGraphVertexCount * CombinatoricsUtils.binomialCoefficient((int) expectedDegree, 2) / 3;
 
 		Result expectedResult = new Result(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, expectedCount);
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/TriangleListingTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/TriangleListingTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/TriangleListingTest.java
index 3999959..9eacb58 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/TriangleListingTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/directed/TriangleListingTest.java
@@ -18,7 +18,6 @@
 
 package org.apache.flink.graph.library.clustering.directed;
 
-import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.graph.asm.AsmTestBase;
 import org.apache.flink.graph.asm.dataset.ChecksumHashCode;
@@ -28,12 +27,17 @@ import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
+import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.junit.Test;
 
 import java.util.List;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link TriangleListing}.
+ */
 public class TriangleListingTest
 extends AsmTestBase {
 
@@ -55,7 +59,7 @@ extends AsmTestBase {
 	public void testCompleteGraph()
 			throws Exception {
 		long expectedDegree = completeGraphVertexCount - 1;
-		long expectedCount = completeGraphVertexCount * CombinatoricsUtils.binomialCoefficient((int)expectedDegree, 2) / 3;
+		long expectedCount = completeGraphVertexCount * CombinatoricsUtils.binomialCoefficient((int) expectedDegree, 2) / 3;
 
 		DataSet<Result<LongValue>> tl = completeGraph
 			.run(new TriangleListing<LongValue, NullValue, NullValue>());

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/AverageClusteringCoefficientTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/AverageClusteringCoefficientTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/AverageClusteringCoefficientTest.java
index ee34ac6..0835169 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/AverageClusteringCoefficientTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/AverageClusteringCoefficientTest.java
@@ -23,10 +23,14 @@ import org.apache.flink.graph.library.clustering.undirected.AverageClusteringCoe
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link AverageClusteringCoefficient}.
+ */
 public class AverageClusteringCoefficientTest
 extends AsmTestBase {
 
@@ -34,7 +38,7 @@ extends AsmTestBase {
 	public void testWithSimpleGraph()
 			throws Exception {
 		// see results in LocalClusteringCoefficientTest.testSimpleGraph
-		Result expectedResult = new Result(6, 1.0/1 + 2.0/3 + 2.0/3 + 1.0/6);
+		Result expectedResult = new Result(6, 1.0 / 1 + 2.0 / 3 + 2.0 / 3 + 1.0 / 6);
 
 		Result averageClusteringCoefficient = new AverageClusteringCoefficient<IntValue, NullValue, NullValue>()
 			.run(undirectedSimpleGraph)

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/GlobalClusteringCoefficientTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/GlobalClusteringCoefficientTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/GlobalClusteringCoefficientTest.java
index 1a73ce1..d9b0ab5 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/GlobalClusteringCoefficientTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/GlobalClusteringCoefficientTest.java
@@ -18,16 +18,20 @@
 
 package org.apache.flink.graph.library.clustering.undirected;
 
-import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.apache.flink.graph.asm.AsmTestBase;
 import org.apache.flink.graph.library.clustering.undirected.GlobalClusteringCoefficient.Result;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
+import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link GlobalClusteringCoefficient}.
+ */
 public class GlobalClusteringCoefficientTest
 extends AsmTestBase {
 
@@ -47,7 +51,7 @@ extends AsmTestBase {
 	public void testWithCompleteGraph()
 			throws Exception {
 		long expectedDegree = completeGraphVertexCount - 1;
-		long expectedCount = completeGraphVertexCount * CombinatoricsUtils.binomialCoefficient((int)expectedDegree, 2);
+		long expectedCount = completeGraphVertexCount * CombinatoricsUtils.binomialCoefficient((int) expectedDegree, 2);
 
 		Result expectedResult = new Result(expectedCount, expectedCount);
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/LocalClusteringCoefficientTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/LocalClusteringCoefficientTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/LocalClusteringCoefficientTest.java
index 2775a00..e00669b 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/LocalClusteringCoefficientTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/LocalClusteringCoefficientTest.java
@@ -18,7 +18,6 @@
 
 package org.apache.flink.graph.library.clustering.undirected;
 
-import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.graph.asm.AsmTestBase;
 import org.apache.flink.graph.asm.dataset.ChecksumHashCode;
@@ -28,12 +27,17 @@ import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
+import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.junit.Test;
 
 import java.util.List;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link LocalClusteringCoefficient}.
+ */
 public class LocalClusteringCoefficientTest
 extends AsmTestBase {
 
@@ -58,7 +62,7 @@ extends AsmTestBase {
 	public void testCompleteGraph()
 			throws Exception {
 		long expectedDegree = completeGraphVertexCount - 1;
-		long expectedTriangleCount = CombinatoricsUtils.binomialCoefficient((int)expectedDegree, 2);
+		long expectedTriangleCount = CombinatoricsUtils.binomialCoefficient((int) expectedDegree, 2);
 
 		DataSet<Result<LongValue>> cc = completeGraph
 			.run(new LocalClusteringCoefficient<LongValue, NullValue, NullValue>());

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/TriadicCensusTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/TriadicCensusTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/TriadicCensusTest.java
index 87b4824..38cf981 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/TriadicCensusTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/TriadicCensusTest.java
@@ -18,16 +18,20 @@
 
 package org.apache.flink.graph.library.clustering.undirected;
 
-import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.apache.flink.graph.asm.AsmTestBase;
 import org.apache.flink.graph.library.clustering.undirected.TriadicCensus.Result;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
+import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link TriadicCensus}.
+ */
 public class TriadicCensusTest
 extends AsmTestBase {
 
@@ -47,7 +51,7 @@ extends AsmTestBase {
 	public void testWithCompleteGraph()
 			throws Exception {
 		long expectedDegree = completeGraphVertexCount - 1;
-		long expectedCount = completeGraphVertexCount * CombinatoricsUtils.binomialCoefficient((int)expectedDegree, 2) / 3;
+		long expectedCount = completeGraphVertexCount * CombinatoricsUtils.binomialCoefficient((int) expectedDegree, 2) / 3;
 
 		Result expectedResult = new Result(0, 0, 0, expectedCount);
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/TriangleListingTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/TriangleListingTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/TriangleListingTest.java
index afbd740..ed9e4c2 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/TriangleListingTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/clustering/undirected/TriangleListingTest.java
@@ -18,7 +18,6 @@
 
 package org.apache.flink.graph.library.clustering.undirected;
 
-import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.graph.asm.AsmTestBase;
 import org.apache.flink.graph.asm.dataset.ChecksumHashCode;
@@ -28,10 +27,15 @@ import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
+import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link TriangleListing}.
+ */
 public class TriangleListingTest
 extends AsmTestBase {
 
@@ -53,7 +57,7 @@ extends AsmTestBase {
 	public void testCompleteGraph()
 			throws Exception {
 		long expectedDegree = completeGraphVertexCount - 1;
-		long expectedCount = completeGraphVertexCount * CombinatoricsUtils.binomialCoefficient((int)expectedDegree, 2) / 3;
+		long expectedCount = completeGraphVertexCount * CombinatoricsUtils.binomialCoefficient((int) expectedDegree, 2) / 3;
 
 		DataSet<Result<LongValue>> tl = completeGraph
 			.run(new TriangleListing<LongValue, NullValue, NullValue>());

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/link_analysis/HITSTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/link_analysis/HITSTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/link_analysis/HITSTest.java
deleted file mode 100644
index 03334da..0000000
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/link_analysis/HITSTest.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.graph.library.link_analysis;
-
-import org.apache.flink.api.java.DataSet;
-import org.apache.flink.api.java.tuple.Tuple2;
-import org.apache.flink.graph.asm.AsmTestBase;
-import org.apache.flink.graph.asm.dataset.Collect;
-import org.apache.flink.graph.library.link_analysis.HITS.Result;
-import org.apache.flink.types.IntValue;
-import org.apache.flink.types.LongValue;
-import org.apache.flink.types.NullValue;
-import org.junit.Test;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.junit.Assert.assertEquals;
-
-public class HITSTest
-extends AsmTestBase {
-
-	/*
-	 * This test result can be verified with the following Python script.
-
-	import math
-	import networkx as nx
-
-	graph=nx.read_edgelist('directedSimpleGraph.csv', delimiter=',', create_using=nx.DiGraph())
-	hits=nx.algorithms.link_analysis.hits(graph)
-
-	hubbiness_norm=math.sqrt(sum(v*v for v in hits[0].values()))
-	authority_norm=math.sqrt(sum(v*v for v in hits[1].values()))
-
-	for key in sorted(hits[0]):
-		print('{}: {}, {}'.format(key, hits[0][key]/hubbiness_norm, hits[1][key]/authority_norm))
-	 */
-	@Test
-	public void testWithSimpleGraph()
-			throws Exception {
-		DataSet<Result<IntValue>> hits = new HITS<IntValue, NullValue, NullValue>(20)
-			.run(directedSimpleGraph);
-
-		List<Tuple2<Double, Double>> expectedResults = new ArrayList<>();
-		expectedResults.add(Tuple2.of(0.544643396306, 0.0));
-		expectedResults.add(Tuple2.of(0.0, 0.836329395866));
-		expectedResults.add(Tuple2.of(0.607227031134, 0.268492526138));
-		expectedResults.add(Tuple2.of(0.544643396306, 0.395444899355));
-		expectedResults.add(Tuple2.of(0.0, 0.268492526138));
-		expectedResults.add(Tuple2.of(0.194942233447, 0.0));
-
-		for (Result<IntValue> result : hits.collect()) {
-			int id = result.f0.getValue();
-			assertEquals(expectedResults.get(id).f0, result.getHubScore().getValue(), 0.000001);
-			assertEquals(expectedResults.get(id).f1, result.getAuthorityScore().getValue(), 0.000001);
-		}
-	}
-
-	@Test
-	public void testWithCompleteGraph()
-			throws Exception {
-		double expectedScore = 1.0 / Math.sqrt(completeGraphVertexCount);
-
-		DataSet<Result<LongValue>> hits = new HITS<LongValue, NullValue, NullValue>(0.000001)
-			.run(completeGraph);
-
-		List<Result<LongValue>> results = hits.collect();
-
-		assertEquals(completeGraphVertexCount, results.size());
-
-		for (Result<LongValue> result : results) {
-			assertEquals(expectedScore, result.getHubScore().getValue(), 0.000001);
-			assertEquals(expectedScore, result.getAuthorityScore().getValue(), 0.000001);
-		}
-	}
-
-	/*
-	 * This test result can be verified with the following Python script.
-
-	import math
-	import networkx as nx
-
-	graph=nx.read_edgelist('directedRMatGraph.csv', delimiter=',', create_using=nx.DiGraph())
-	hits=nx.algorithms.link_analysis.hits(graph)
-
-	hubbiness_norm=math.sqrt(sum(v*v for v in hits[0].values()))
-	authority_norm=math.sqrt(sum(v*v for v in hits[1].values()))
-
-	for key in [0, 1, 2, 8, 13, 29, 109, 394, 652, 1020]:
-		print('{}: {}, {}'.format(key, hits[0][str(key)]/hubbiness_norm, hits[1][str(key)]/authority_norm))
-	 */
-	@Test
-	public void testWithRMatGraph()
-			throws Exception {
-		DataSet<Result<LongValue>> hits = directedRMatGraph(10, 16)
-			.run(new HITS<LongValue, NullValue, NullValue>(0.000001));
-
-		Map<Long, Result<LongValue>> results = new HashMap<>();
-		for (Result<LongValue> result :  new Collect<Result<LongValue>>().run(hits).execute()) {
-			results.put(result.f0.getValue(), result);
-		}
-
-		assertEquals(902, results.size());
-
-		Map<Long, Tuple2<Double, Double>> expectedResults = new HashMap<>();
-		// a pseudo-random selection of results, both high and low
-		expectedResults.put(0L, Tuple2.of(0.231077034747, 0.238110214937));
-		expectedResults.put(1L, Tuple2.of(0.162364053933, 0.169679504287));
-		expectedResults.put(2L, Tuple2.of(0.162412612499, 0.161015667261));
-		expectedResults.put(8L, Tuple2.of(0.167064641724, 0.158592966505));
-		expectedResults.put(13L, Tuple2.of(0.041915595624, 0.0407091625629));
-		expectedResults.put(29L, Tuple2.of(0.0102017346511, 0.0146218045999));
-		expectedResults.put(109L, Tuple2.of(0.00190531000389, 0.00481944993023));
-		expectedResults.put(394L, Tuple2.of(0.0122287016161, 0.0147987969538));
-		expectedResults.put(652L, Tuple2.of(0.010966659242, 0.0113713306749));
-		expectedResults.put(1020L, Tuple2.of(0.0, 0.000326973732127));
-
-		for (Map.Entry<Long, Tuple2<Double, Double>> expected : expectedResults.entrySet()) {
-			double hubScore = results.get(expected.getKey()).getHubScore().getValue();
-			double authorityScore = results.get(expected.getKey()).getAuthorityScore().getValue();
-
-			assertEquals(expected.getValue().f0, hubScore, 0.00001);
-			assertEquals(expected.getValue().f1, authorityScore, 0.00001);
-		}
-	}
-}
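
Note that HITSTest is deleted outright rather than edited in place: a package-name pattern that forbids underscores cannot be satisfied by classes living under link_analysis, so the whole package has to move. Assuming this commit follows that pattern, the files reappear under a package name without the underscore (the new location is not shown in this excerpt). A minimal sketch of the rule:

	// Fails a package-name pattern that forbids underscores:
	//   package org.apache.flink.graph.library.link_analysis;

	// Passes (assumed post-rename form, not visible in this excerpt):
	package org.apache.flink.graph.library.linkanalysis;

	class PackageNameExample {}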

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/link_analysis/PageRankTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/link_analysis/PageRankTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/link_analysis/PageRankTest.java
deleted file mode 100644
index fc7e485..0000000
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/link_analysis/PageRankTest.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.graph.library.link_analysis;
-
-import org.apache.flink.api.java.DataSet;
-import org.apache.flink.api.java.tuple.Tuple2;
-import org.apache.flink.graph.asm.AsmTestBase;
-import org.apache.flink.graph.asm.dataset.Collect;
-import org.apache.flink.graph.library.link_analysis.PageRank.Result;
-import org.apache.flink.types.DoubleValue;
-import org.apache.flink.types.IntValue;
-import org.apache.flink.types.LongValue;
-import org.apache.flink.types.NullValue;
-import org.junit.Test;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.junit.Assert.assertEquals;
-
-public class PageRankTest
-extends AsmTestBase {
-
-	private static final double DAMPING_FACTOR = 0.85;
-
-	/*
-	 * This test result can be verified with the following Python script.
-
-	import networkx as nx
-
-	graph=nx.read_edgelist('directedSimpleGraph.csv', delimiter=',', create_using=nx.DiGraph())
-	pagerank=nx.algorithms.link_analysis.pagerank(graph)
-
-	for key in sorted(pagerank):
-		print('{}: {}'.format(key, pagerank[key]))
-	 */
-	@Test
-	public void testWithSimpleGraph()
-			throws Exception {
-		DataSet<Result<IntValue>> pr = new PageRank<IntValue, NullValue, NullValue>(DAMPING_FACTOR, 10)
-			.run(directedSimpleGraph);
-
-		List<Double> expectedResults = new ArrayList<>();
-		expectedResults.add(0.09091296131286301);
-		expectedResults.add(0.27951855944178117);
-		expectedResults.add(0.12956847924535586);
-		expectedResults.add(0.22329643739217675);
-		expectedResults.add(0.18579060129496028);
-		expectedResults.add(0.09091296131286301);
-
-		for (Tuple2<IntValue, DoubleValue> result : pr.collect()) {
-			int id = result.f0.getValue();
-			assertEquals(expectedResults.get(id), result.f1.getValue(), 0.000001);
-		}
-	}
-
-	@Test
-	public void testWithCompleteGraph()
-			throws Exception {
-		double expectedScore = 1.0 / completeGraphVertexCount;
-
-		DataSet<Result<LongValue>> pr = new PageRank<LongValue, NullValue, NullValue>(DAMPING_FACTOR, 0.000001)
-			.run(completeGraph);
-
-		List<Result<LongValue>> results = pr.collect();
-
-		assertEquals(completeGraphVertexCount, results.size());
-
-		for (Tuple2<LongValue, DoubleValue> result : results) {
-			assertEquals(expectedScore, result.f1.getValue(), 0.000001);
-		}
-	}
-
-	/*
-	 * This test result can be verified with the following Python script.
-
-	import networkx as nx
-
-	graph=nx.read_edgelist('directedRMatGraph.csv', delimiter=',', create_using=nx.DiGraph())
-	pagerank=nx.algorithms.link_analysis.pagerank(graph)
-
-	for key in [0, 1, 2, 8, 13, 29, 109, 394, 652, 1020]:
-		print('{}: {}'.format(key, pagerank[str(key)]))
-	 */
-	@Test
-	public void testWithRMatGraph()
-			throws Exception {
-		DataSet<Result<LongValue>> pr = new PageRank<LongValue, NullValue, NullValue>(DAMPING_FACTOR, 0.000001)
-			.run(directedRMatGraph(10, 16));
-
-		Map<Long, Result<LongValue>> results = new HashMap<>();
-		for (Result<LongValue> result :  new Collect<Result<LongValue>>().run(pr).execute()) {
-			results.put(result.getVertexId0().getValue(), result);
-		}
-
-		assertEquals(902, results.size());
-
-		Map<Long, Double> expectedResults = new HashMap<>();
-		// a pseudo-random selection of results, both high and low
-		expectedResults.put(0L, 0.027111807822);
-		expectedResults.put(1L, 0.0132842310382);
-		expectedResults.put(2L, 0.0121818392504);
-		expectedResults.put(8L, 0.0115916809743);
-		expectedResults.put(13L, 0.00183249490033);
-		expectedResults.put(29L, 0.000848095047082);
-		expectedResults.put(109L, 0.000308507844048);
-		expectedResults.put(394L, 0.000828743280246);
-		expectedResults.put(652L, 0.000684102931253);
-		expectedResults.put(1020L, 0.000250487135148);
-
-		for (Map.Entry<Long, Double> expected : expectedResults.entrySet()) {
-			double value = results.get(expected.getKey()).getPageRankScore().getValue();
-
-			assertEquals(expected.getValue(), value, 0.00001);
-		}
-	}
-}


[08/15] flink git commit: [FLINK-6709] [gelly] Activate strict checkstyle for flink-gellies

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/TriadicCensus.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/TriadicCensus.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/TriadicCensus.java
index 5f28605..949dd4c 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/TriadicCensus.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/TriadicCensus.java
@@ -18,12 +18,10 @@
 
 package org.apache.flink.graph.library.clustering.directed;
 
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.flink.api.common.accumulators.LongCounter;
-import org.apache.flink.graph.AbstractGraphAnalytic;
 import org.apache.flink.graph.AnalyticHelper;
 import org.apache.flink.graph.Graph;
+import org.apache.flink.graph.GraphAnalyticBase;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.graph.asm.degree.annotate.directed.VertexDegrees;
 import org.apache.flink.graph.asm.degree.annotate.directed.VertexDegrees.Degrees;
@@ -32,6 +30,9 @@ import org.apache.flink.graph.library.clustering.directed.TriadicCensus.Result;
 import org.apache.flink.types.CopyableValue;
 import org.apache.flink.util.Preconditions;
 
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+
 import java.io.IOException;
 import java.math.BigInteger;
 import java.text.NumberFormat;
@@ -41,15 +42,15 @@ import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
 /**
  * A triad is formed by three connected or unconnected vertices in a graph.
  * The triadic census counts the occurrences of each type of triad.
- * <p>
- * http://vlado.fmf.uni-lj.si/pub/networks/doc/triads/triads.pdf
+ *
+ * <p>See http://vlado.fmf.uni-lj.si/pub/networks/doc/triads/triads.pdf
  *
  * @param <K> graph ID type
  * @param <VV> vertex value type
  * @param <EV> edge value type
  */
 public class TriadicCensus<K extends Comparable<K> & CopyableValue<K>, VV, EV>
-extends AbstractGraphAnalytic<K, VV, EV, Result> {
+extends GraphAnalyticBase<K, VV, EV, Result> {
 
 	private TriangleListingHelper<K> triangleListingHelper;
 
@@ -101,24 +102,24 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 		BigInteger three = BigInteger.valueOf(3);
 		BigInteger six = BigInteger.valueOf(6);
 
-		BigInteger vertexCount = BigInteger.valueOf((Long)vertexDegreesHelper.getAccumulator(env, "vc"));
-		BigInteger unidirectionalEdgeCount = BigInteger.valueOf((Long)vertexDegreesHelper.getAccumulator(env, "uec") / 2);
-		BigInteger bidirectionalEdgeCount = BigInteger.valueOf((Long)vertexDegreesHelper.getAccumulator(env, "bec") / 2);
-		BigInteger triplet021dCount = BigInteger.valueOf((Long)vertexDegreesHelper.getAccumulator(env, "021d"));
-		BigInteger triplet021uCount = BigInteger.valueOf((Long)vertexDegreesHelper.getAccumulator(env, "021u"));
-		BigInteger triplet021cCount = BigInteger.valueOf((Long)vertexDegreesHelper.getAccumulator(env, "021c"));
-		BigInteger triplet111dCount = BigInteger.valueOf((Long)vertexDegreesHelper.getAccumulator(env, "111d"));
-		BigInteger triplet111uCount = BigInteger.valueOf((Long)vertexDegreesHelper.getAccumulator(env, "111u"));
-		BigInteger triplet201Count = BigInteger.valueOf((Long)vertexDegreesHelper.getAccumulator(env, "201"));
+		BigInteger vertexCount = BigInteger.valueOf((Long) vertexDegreesHelper.getAccumulator(env, "vc"));
+		BigInteger unidirectionalEdgeCount = BigInteger.valueOf((Long) vertexDegreesHelper.getAccumulator(env, "uec") / 2);
+		BigInteger bidirectionalEdgeCount = BigInteger.valueOf((Long) vertexDegreesHelper.getAccumulator(env, "bec") / 2);
+		BigInteger triplet021dCount = BigInteger.valueOf((Long) vertexDegreesHelper.getAccumulator(env, "021d"));
+		BigInteger triplet021uCount = BigInteger.valueOf((Long) vertexDegreesHelper.getAccumulator(env, "021u"));
+		BigInteger triplet021cCount = BigInteger.valueOf((Long) vertexDegreesHelper.getAccumulator(env, "021c"));
+		BigInteger triplet111dCount = BigInteger.valueOf((Long) vertexDegreesHelper.getAccumulator(env, "111d"));
+		BigInteger triplet111uCount = BigInteger.valueOf((Long) vertexDegreesHelper.getAccumulator(env, "111u"));
+		BigInteger triplet201Count = BigInteger.valueOf((Long) vertexDegreesHelper.getAccumulator(env, "201"));
 
 		// triads with three connecting edges = closed triplet = triangle
-		BigInteger triangle030tCount = BigInteger.valueOf((Long)triangleListingHelper.getAccumulator(env, "030t"));
-		BigInteger triangle030cCount = BigInteger.valueOf((Long)triangleListingHelper.getAccumulator(env, "030c"));
-		BigInteger triangle120dCount = BigInteger.valueOf((Long)triangleListingHelper.getAccumulator(env, "120d"));
-		BigInteger triangle120uCount = BigInteger.valueOf((Long)triangleListingHelper.getAccumulator(env, "120u"));
-		BigInteger triangle120cCount = BigInteger.valueOf((Long)triangleListingHelper.getAccumulator(env, "120c"));
-		BigInteger triangle210Count = BigInteger.valueOf((Long)triangleListingHelper.getAccumulator(env, "210"));
-		BigInteger triangle300Count = BigInteger.valueOf((Long)triangleListingHelper.getAccumulator(env, "300"));
+		BigInteger triangle030tCount = BigInteger.valueOf((Long) triangleListingHelper.getAccumulator(env, "030t"));
+		BigInteger triangle030cCount = BigInteger.valueOf((Long) triangleListingHelper.getAccumulator(env, "030c"));
+		BigInteger triangle120dCount = BigInteger.valueOf((Long) triangleListingHelper.getAccumulator(env, "120d"));
+		BigInteger triangle120uCount = BigInteger.valueOf((Long) triangleListingHelper.getAccumulator(env, "120u"));
+		BigInteger triangle120cCount = BigInteger.valueOf((Long) triangleListingHelper.getAccumulator(env, "120c"));
+		BigInteger triangle210Count = BigInteger.valueOf((Long) triangleListingHelper.getAccumulator(env, "210"));
+		BigInteger triangle300Count = BigInteger.valueOf((Long) triangleListingHelper.getAccumulator(env, "300"));
 
 		// triads with two connecting edges = open triplet;
 		// each triangle deducts the count of three triplets
@@ -236,7 +237,7 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 			long triangle210Count = 0;
 			long triangle300tCount = 0;
 
-			for (int i = 0 ; i < typeTable.length ; i++) {
+			for (int i = 0; i < typeTable.length; i++) {
 				if (typeTable[i] == 9) {
 					triangle030tCount += triangleCount[i];
 				} else if (typeTable[i] == 10) {
@@ -509,7 +510,7 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 		/**
 		 * Get the array of counts.
 		 *
-		 * The order of the counts is from least to most connected:
+		 * <p>The order of the counts is from least to most connected:
 		 *   003, 012, 102, 021d, 021u, 021c, 111d, 111u,
 		 *   030t, 030c, 201, 120d, 120u, 120c, 210, 300
 		 *
@@ -550,11 +551,19 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 
 		@Override
 		public boolean equals(Object obj) {
-			if (obj == null) { return false; }
-			if (obj == this) { return true; }
-			if (obj.getClass() != getClass()) { return false; }
+			if (obj == null) {
+				return false;
+			}
+
+			if (obj == this) {
+				return true;
+			}
+
+			if (obj.getClass() != getClass()) {
+				return false;
+			}
 
-			Result rhs = (Result)obj;
+			Result rhs = (Result) obj;
 
 			return new EqualsBuilder()
 				.append(counts, rhs.counts)
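
The equals() rewrite above recurs in nearly every Result class in this commit: the strict rules forbid the single-line form "if (cond) { stmt; }", so each guard is expanded onto its own lines; casts gain a space, (Result) obj instead of (Result)obj; and for-loop headers drop the space before semicolons. A self-contained sketch of the resulting equals()/hashCode() shape, where FooResult and its count field are illustrative stand-ins:

	import org.apache.commons.lang3.builder.EqualsBuilder;
	import org.apache.commons.lang3.builder.HashCodeBuilder;

	class FooResult {
		private long count;

		@Override
		public int hashCode() {
			return new HashCodeBuilder()
				.append(count)
				.toHashCode();
		}

		@Override
		public boolean equals(Object obj) {
			// each guard on its own lines; no single-line if blocks
			if (obj == null) {
				return false;
			}

			if (obj == this) {
				return true;
			}

			if (obj.getClass() != getClass()) {
				return false;
			}

			FooResult rhs = (FooResult) obj; // space after the cast

			return new EqualsBuilder()
				.append(count, rhs.count)
				.isEquals();
		}
	}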

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/TriangleListing.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/TriangleListing.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/TriangleListing.java
index 582c4b5..38b0746 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/TriangleListing.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/TriangleListing.java
@@ -53,13 +53,13 @@ import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
 
 /**
  * Generates a listing of distinct triangles from the input graph.
- * <p>
- * A triangle is a 3-clique with vertices A, B, and C connected by edges
+ *
+ * <p>A triangle is a 3-clique with vertices A, B, and C connected by edges
  * (A, B), (A, C), and (B, C).
- * <p>
- * The input graph must not contain duplicate edges or self-loops.
- * <p>
- * This algorithm is similar to the undirected version but also tracks and
+ *
+ * <p>The input graph must not contain duplicate edges or self-loops.
+ *
+ * <p>This algorithm is similar to the undirected version but also tracks and
  * computes a bitmask representing the six potential graph edges connecting
  * the triangle vertices.
  *
@@ -112,7 +112,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingDataSet other) {
 		Preconditions.checkNotNull(other);
 
-		if (! TriangleListing.class.isAssignableFrom(other.getClass())) {
+		if (!TriangleListing.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 
@@ -258,9 +258,9 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 	 */
 	private static final class OrderByDegree<T extends Comparable<T>, ET>
 	implements MapFunction<Edge<T, Tuple3<ET, Degrees, Degrees>>, Tuple3<T, T, ByteValue>> {
-		private ByteValue forward = new ByteValue((byte)(EdgeOrder.FORWARD.getBitmask() << 2));
+		private ByteValue forward = new ByteValue((byte) (EdgeOrder.FORWARD.getBitmask() << 2));
 
-		private ByteValue reverse = new ByteValue((byte)(EdgeOrder.REVERSE.getBitmask() << 2));
+		private ByteValue reverse = new ByteValue((byte) (EdgeOrder.REVERSE.getBitmask() << 2));
 
 		private Tuple3<T, T, ByteValue> output = new Tuple3<>();
 
@@ -319,17 +319,17 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 					Tuple2<T, ByteValue> previous = visited.get(i);
 
 					output.f1 = previous.f0;
-					output.f3.setValue((byte)(previous.f1.getValue() | bitmask));
+					output.f3.setValue((byte) (previous.f1.getValue() | bitmask));
 
 					// u, v, w, bitmask
 					out.collect(output);
 				}
 
-				if (! iter.hasNext()) {
+				if (!iter.hasNext()) {
 					break;
 				}
 
-				byte shiftedBitmask = (byte)(bitmask << 2);
+				byte shiftedBitmask = (byte) (bitmask << 2);
 
 				if (visitedCount == visited.size()) {
 					visited.add(new Tuple2<>(edge.f1.copy(), new ByteValue(shiftedBitmask)));
@@ -361,7 +361,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 			output.f0 = triplet.f0;
 			output.f1 = triplet.f1;
 			output.f2 = triplet.f2;
-			output.f3.setValue((byte)(triplet.f3.getValue() | edge.f2.getValue()));
+			output.f3.setValue((byte) (triplet.f3.getValue() | edge.f2.getValue()));
 			return output;
 		}
 	}
@@ -381,26 +381,26 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 			if (value.f0.compareTo(value.f1) > 0) {
 				byte bitmask = value.f3.getValue();
 
-				T temp_val = value.f0;
+				T tempVal = value.f0;
 				value.f0 = value.f1;
 
-				if (temp_val.compareTo(value.f2) < 0) {
-					value.f1 = temp_val;
+				if (tempVal.compareTo(value.f2) < 0) {
+					value.f1 = tempVal;
 
 					int f0f1 = ((bitmask & 0b100000) >>> 1) | ((bitmask & 0b010000) << 1);
 					int f0f2 = (bitmask & 0b001100) >>> 2;
 					int f1f2 = (bitmask & 0b000011) << 2;
 
-					value.f3.setValue((byte)(f0f1 | f0f2 | f1f2));
+					value.f3.setValue((byte) (f0f1 | f0f2 | f1f2));
 				} else {
 					value.f1 = value.f2;
-					value.f2 = temp_val;
+					value.f2 = tempVal;
 
 					int f0f1 = (bitmask & 0b000011) << 4;
 					int f0f2 = ((bitmask & 0b100000) >>> 3) | ((bitmask & 0b010000) >>> 1);
 					int f1f2 = ((bitmask & 0b001000) >>> 3) | ((bitmask & 0b000100) >>> 1);
 
-					value.f3.setValue((byte)(f0f1 | f0f2 | f1f2));
+					value.f3.setValue((byte) (f0f1 | f0f2 | f1f2));
 				}
 			}
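
temp_val becoming tempVal above is the LocalVariableName rule: local variables must be camelCase with no underscores. A trivial sketch, with a hypothetical helper class:

	class LocalVariableNameExample {
		static <T extends Comparable<T>> void sortPair(T[] pair) {
			if (pair[0].compareTo(pair[1]) > 0) {
				// camelCase local variable; "temp_val" would be rejected
				T tempVal = pair[0];
				pair[0] = pair[1];
				pair[1] = tempVal;
			}
		}
	}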
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/AverageClusteringCoefficient.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/AverageClusteringCoefficient.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/AverageClusteringCoefficient.java
index e01892b..c2c2ad2 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/AverageClusteringCoefficient.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/AverageClusteringCoefficient.java
@@ -18,18 +18,19 @@
 
 package org.apache.flink.graph.library.clustering.undirected;
 
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.flink.api.common.accumulators.DoubleCounter;
 import org.apache.flink.api.common.accumulators.LongCounter;
 import org.apache.flink.api.java.DataSet;
-import org.apache.flink.graph.AbstractGraphAnalytic;
 import org.apache.flink.graph.AnalyticHelper;
 import org.apache.flink.graph.Graph;
+import org.apache.flink.graph.GraphAnalyticBase;
 import org.apache.flink.graph.asm.result.PrintableResult;
 import org.apache.flink.graph.library.clustering.undirected.AverageClusteringCoefficient.Result;
 import org.apache.flink.types.CopyableValue;
 
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+
 import java.io.IOException;
 
 import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
@@ -43,7 +44,7 @@ import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
  * @param <EV> edge value type
  */
 public class AverageClusteringCoefficient<K extends Comparable<K> & CopyableValue<K>, VV, EV>
-extends AbstractGraphAnalytic<K, VV, EV, Result> {
+extends GraphAnalyticBase<K, VV, EV, Result> {
 
 	private static final String VERTEX_COUNT = "vertexCount";
 
@@ -181,11 +182,19 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 
 		@Override
 		public boolean equals(Object obj) {
-			if (obj == null) { return false; }
-			if (obj == this) { return true; }
-			if (obj.getClass() != getClass()) { return false; }
+			if (obj == null) {
+				return false;
+			}
+
+			if (obj == this) {
+				return true;
+			}
+
+			if (obj.getClass() != getClass()) {
+				return false;
+			}
 
-			Result rhs = (Result)obj;
+			Result rhs = (Result) obj;
 
 			return new EqualsBuilder()
 				.append(vertexCount, rhs.vertexCount)

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/GlobalClusteringCoefficient.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/GlobalClusteringCoefficient.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/GlobalClusteringCoefficient.java
index 2eac620..5377c1f 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/GlobalClusteringCoefficient.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/GlobalClusteringCoefficient.java
@@ -18,17 +18,18 @@
 
 package org.apache.flink.graph.library.clustering.undirected;
 
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.flink.api.java.DataSet;
-import org.apache.flink.graph.AbstractGraphAnalytic;
 import org.apache.flink.graph.Graph;
+import org.apache.flink.graph.GraphAnalyticBase;
 import org.apache.flink.graph.asm.dataset.Count;
 import org.apache.flink.graph.asm.result.PrintableResult;
 import org.apache.flink.graph.library.clustering.undirected.GlobalClusteringCoefficient.Result;
 import org.apache.flink.graph.library.metric.undirected.VertexMetrics;
 import org.apache.flink.types.CopyableValue;
 
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+
 import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
 
 /**
@@ -40,7 +41,7 @@ import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
  * @param <EV> edge value type
  */
 public class GlobalClusteringCoefficient<K extends Comparable<K> & CopyableValue<K>, VV, EV>
-extends AbstractGraphAnalytic<K, VV, EV, Result> {
+extends GraphAnalyticBase<K, VV, EV, Result> {
 
 	private Count<TriangleListing.Result<K>> triangleCount;
 
@@ -141,13 +142,13 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 		 * number of closed triplets (triangles) divided by the total number of
 		 * triplets.
 		 *
-		 * A score of {@code Double.NaN} is returned for a graph of isolated vertices
+		 * <p>A score of {@code Double.NaN} is returned for a graph of isolated vertices
 		 * for which both the triangle count and number of neighbors are zero.
 		 *
 		 * @return global clustering coefficient score
 		 */
 		public double getGlobalClusteringCoefficientScore() {
-			return (tripletCount == 0) ? Double.NaN : triangleCount / (double)tripletCount;
+			return (tripletCount == 0) ? Double.NaN : triangleCount / (double) tripletCount;
 		}
 
 		@Override
@@ -167,11 +168,19 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 
 		@Override
 		public boolean equals(Object obj) {
-			if (obj == null) { return false; }
-			if (obj == this) { return true; }
-			if (obj.getClass() != getClass()) { return false; }
+			if (obj == null) {
+				return false;
+			}
+
+			if (obj == this) {
+				return true;
+			}
+
+			if (obj.getClass() != getClass()) {
+				return false;
+			}
 
-			Result rhs = (Result)obj;
+			Result rhs = (Result) obj;
 
 			return new EqualsBuilder()
 				.append(tripletCount, rhs.tripletCount)

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/LocalClusteringCoefficient.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/LocalClusteringCoefficient.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/LocalClusteringCoefficient.java
index 10f7aba..e94310f 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/LocalClusteringCoefficient.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/LocalClusteringCoefficient.java
@@ -34,7 +34,7 @@ import org.apache.flink.graph.asm.degree.annotate.undirected.VertexDegree;
 import org.apache.flink.graph.asm.result.PrintableResult;
 import org.apache.flink.graph.asm.result.UnaryResult;
 import org.apache.flink.graph.library.clustering.undirected.LocalClusteringCoefficient.Result;
-import org.apache.flink.graph.utils.Murmur3_32;
+import org.apache.flink.graph.utils.MurmurHash;
 import org.apache.flink.graph.utils.proxy.GraphAlgorithmWrappingDataSet;
 import org.apache.flink.graph.utils.proxy.OptionalBoolean;
 import org.apache.flink.types.CopyableValue;
@@ -48,12 +48,12 @@ import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
  * The local clustering coefficient measures the connectedness of each vertex's
  * neighborhood. Scores range from 0.0 (no edges between neighbors) to 1.0
  * (neighborhood is a clique).
- * <p>
- * An edge between a vertex's neighbors is a triangle. Counting edges between
+ *
+ * <p>An edge between a vertex's neighbors is a triangle. Counting edges between
  * neighbors is equivalent to counting the number of triangles which include
  * the vertex.
- * <p>
- * The input graph must be a simple, undirected graph containing no duplicate
+ *
+ * <p>The input graph must be a simple, undirected graph containing no duplicate
  * edges or self-loops.
  *
  * @param <K> graph ID type
@@ -107,7 +107,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingDataSet other) {
 		Preconditions.checkNotNull(other);
 
-		if (! LocalClusteringCoefficient.class.isAssignableFrom(other.getClass())) {
+		if (!LocalClusteringCoefficient.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 
@@ -247,7 +247,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 	implements PrintableResult, UnaryResult<T> {
 		private static final int HASH_SEED = 0xc23937c1;
 
-		private Murmur3_32 hasher = new Murmur3_32(HASH_SEED);
+		private MurmurHash hasher = new MurmurHash(HASH_SEED);
 
 		@Override
 		public T getVertexId0() {
@@ -283,7 +283,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 		 * number of edges between neighbors, equal to the triangle count,
 		 * divided by the number of potential edges between neighbors.
 		 *
-		 * A score of {@code Double.NaN} is returned for a vertex with degree 1
+		 * <p>A score of {@code Double.NaN} is returned for a vertex with degree 1
 		 * for which both the triangle count and number of neighbors are zero.
 		 *
 		 * @return local clustering coefficient score
@@ -292,7 +292,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 			long degree = getDegree().getValue();
 			long neighborPairs = degree * (degree - 1) / 2;
 
-			return (neighborPairs == 0) ? Double.NaN : getTriangleCount().getValue() / (double)neighborPairs;
+			return (neighborPairs == 0) ? Double.NaN : getTriangleCount().getValue() / (double) neighborPairs;
 		}
 
 		/**
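
Beyond formatting, this hunk picks up a class rename: Murmur3_32 becomes MurmurHash, since a strict type-name pattern (camel case, no underscores) rejects the old name. Call sites change mechanically; a sketch of the after state, assuming flink-gelly on the classpath and a hypothetical enclosing class, with the seed value taken from the diff above:

	import org.apache.flink.graph.utils.MurmurHash;

	class TypeNameExample {
		private static final int HASH_SEED = 0xc23937c1;

		// formerly "new Murmur3_32(HASH_SEED)"; the class itself was renamed
		private final MurmurHash hasher = new MurmurHash(HASH_SEED);
	}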

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/TriadicCensus.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/TriadicCensus.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/TriadicCensus.java
index 604621d..c5a323d 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/TriadicCensus.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/TriadicCensus.java
@@ -18,11 +18,9 @@
 
 package org.apache.flink.graph.library.clustering.undirected;
 
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.flink.api.java.DataSet;
-import org.apache.flink.graph.AbstractGraphAnalytic;
 import org.apache.flink.graph.Graph;
+import org.apache.flink.graph.GraphAnalyticBase;
 import org.apache.flink.graph.asm.dataset.Count;
 import org.apache.flink.graph.asm.result.PrintableResult;
 import org.apache.flink.graph.library.clustering.undirected.TriadicCensus.Result;
@@ -30,6 +28,9 @@ import org.apache.flink.graph.library.metric.undirected.VertexMetrics;
 import org.apache.flink.types.CopyableValue;
 import org.apache.flink.util.Preconditions;
 
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+
 import java.math.BigInteger;
 import java.text.NumberFormat;
 
@@ -38,18 +39,18 @@ import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
 /**
  * A triad is formed by three connected or unconnected vertices in a graph.
  * The triadic census counts the occurrences of each type of triad.
- * <p>
- * The four types of undirected triads are formed with 0, 1, 2, or 3
+ *
+ * <p>The four types of undirected triads are formed with 0, 1, 2, or 3
  * connecting edges.
- * <p>
- * http://vlado.fmf.uni-lj.si/pub/networks/doc/triads/triads.pdf
+ *
+ * <p>See http://vlado.fmf.uni-lj.si/pub/networks/doc/triads/triads.pdf
  *
  * @param <K> graph ID type
  * @param <VV> vertex value type
  * @param <EV> edge value type
  */
 public class TriadicCensus<K extends Comparable<K> & CopyableValue<K>, VV, EV>
-extends AbstractGraphAnalytic<K, VV, EV, Result> {
+extends GraphAnalyticBase<K, VV, EV, Result> {
 
 	private Count<TriangleListing.Result<K>> triangleCount;
 
@@ -203,7 +204,7 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 		/**
 		 * Get the array of counts.
 		 *
-		 * The order of the counts is from least to most connected:
+		 * <p>The order of the counts is from least to most connected:
 		 *   03, 12, 21, 30
 		 *
 		 * @return array of counts
@@ -231,11 +232,19 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 
 		@Override
 		public boolean equals(Object obj) {
-			if (obj == null) { return false; }
-			if (obj == this) { return true; }
-			if (obj.getClass() != getClass()) { return false; }
+			if (obj == null) {
+				return false;
+			}
+
+			if (obj == this) {
+				return true;
+			}
+
+			if (obj.getClass() != getClass()) {
+				return false;
+			}
 
-			Result rhs = (Result)obj;
+			Result rhs = (Result) obj;
 
 			return new EqualsBuilder()
 				.append(counts, rhs.counts)
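
The Javadoc churn above is the same fix applied file after file in this commit: a paragraph may no longer be introduced by a bare <p> on a line of its own; instead <p> is attached to the first word of the paragraph it opens, in the style of the JavadocParagraph check. Schematically, using the triad text from the diff above on a hypothetical class:

	/**
	 * A triad is formed by three connected or unconnected vertices in a graph.
	 *
	 * <p>See http://vlado.fmf.uni-lj.si/pub/networks/doc/triads/triads.pdf
	 */
	class JavadocParagraphExample {}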

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/TriangleListing.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/TriangleListing.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/TriangleListing.java
index ee8dbaf..b281473 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/TriangleListing.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/undirected/TriangleListing.java
@@ -33,9 +33,9 @@ import org.apache.flink.api.java.tuple.Tuple3;
 import org.apache.flink.graph.Edge;
 import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.asm.degree.annotate.undirected.EdgeDegreePair;
-import org.apache.flink.graph.library.clustering.undirected.TriangleListing.Result;
 import org.apache.flink.graph.asm.result.PrintableResult;
 import org.apache.flink.graph.asm.result.TertiaryResult;
+import org.apache.flink.graph.library.clustering.undirected.TriangleListing.Result;
 import org.apache.flink.graph.utils.proxy.GraphAlgorithmWrappingDataSet;
 import org.apache.flink.graph.utils.proxy.OptionalBoolean;
 import org.apache.flink.types.CopyableValue;
@@ -51,16 +51,17 @@ import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
 
 /**
  * Generates a listing of distinct triangles from the input graph.
- * <p>
- * A triangle is a 3-cycle with vertices A, B, and C connected by edges
+ *
+ * <p>A triangle is a 3-cycle with vertices A, B, and C connected by edges
  * (A, B), (A, C), and (B, C).
- * <p>
- * The input graph must be a simple, undirected graph containing no duplicate
+ *
+ * <p>The input graph must be a simple, undirected graph containing no duplicate
  * edges or self-loops.
- * <p>
- * Algorithm from "Finding, Counting and Listing all Triangles in Large Graphs,
+ *
+ * <p>Algorithm from "Finding, Counting and Listing all Triangles in Large Graphs,
  * An Experimental Study", Thomas Schank and Dorothea Wagner.
- * http://i11www.iti.uni-karlsruhe.de/extra/publications/sw-fclt-05_t.pdf
+ *
+ * <p>See http://i11www.iti.uni-karlsruhe.de/extra/publications/sw-fclt-05_t.pdf
  *
  * @param <K> graph ID type
  * @param <VV> vertex value type
@@ -111,7 +112,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingDataSet other) {
 		Preconditions.checkNotNull(other);
 
-		if (! TriangleListing.class.isAssignableFrom(other.getClass())) {
+		if (!TriangleListing.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 
@@ -182,8 +183,8 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 	/**
 	 * Removes edge values while filtering such that only edges where the
 	 * source vertex ID compares less than the target vertex ID are emitted.
-	 * <p>
-	 * Since the input graph is a simple graph this filter removes exactly half
+	 *
+	 * <p>Since the input graph is a simple graph this filter removes exactly half
 	 * of the original edges.
 	 *
 	 * @param <T> ID type
@@ -210,8 +211,8 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 	 * vertex has lower degree are emitted. If the source and target vertex
 	 * degrees are equal then the edge is emitted if the source vertex ID
 	 * compares less than the target vertex ID.
-	 * <p>
-	 * Since the input graph is a simple graph this filter removes exactly half
+	 *
+	 * <p>Since the input graph is a simple graph this filter removes exactly half
 	 * of the original edges.
 	 *
 	 * @param <T> ID type
@@ -270,7 +271,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 					out.collect(output);
 				}
 
-				if (! iter.hasNext()) {
+				if (!iter.hasNext()) {
 					break;
 				}
 
@@ -319,14 +320,14 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 				throws Exception {
 			// by the triangle listing algorithm we know f1 < f2
 			if (value.f0.compareTo(value.f1) > 0) {
-				T temp_val = value.f0;
+				T tempVal = value.f0;
 				value.f0 = value.f1;
 
-				if (temp_val.compareTo(value.f2) <= 0) {
-					value.f1 = temp_val;
+				if (tempVal.compareTo(value.f2) <= 0) {
+					value.f1 = tempVal;
 				} else {
 					value.f1 = value.f2;
-					value.f2 = temp_val;
+					value.f2 = tempVal;
 				}
 			}
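
"! iter.hasNext()" tightening to "!iter.hasNext()" is the no-whitespace-after-unary-operator rule; the same change hits every "! X.class.isAssignableFrom(...)" guard in this commit. A one-method sketch with a hypothetical class:

	import java.util.Iterator;
	import java.util.List;

	class NegationSpacingExample {
		static int count(List<String> items) {
			int n = 0;
			Iterator<String> iter = items.iterator();
			while (true) {
				if (!iter.hasNext()) { // no space after the unary '!'
					break;
				}
				iter.next();
				n++;
			}
			return n;
		}
	}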
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/link_analysis/Functions.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/link_analysis/Functions.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/link_analysis/Functions.java
deleted file mode 100644
index a7d6ef1..0000000
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/link_analysis/Functions.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.graph.library.link_analysis;
-
-import org.apache.flink.api.common.functions.ReduceFunction;
-import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFields;
-import org.apache.flink.api.java.tuple.Tuple2;
-import org.apache.flink.types.DoubleValue;
-
-class Functions {
-
-	private Functions() {}
-
-	/**
-	 * Sum vertices' scores.
-	 *
-	 * @param <T> ID type
-	 */
-	@ForwardedFields("0")
-	protected static final class SumScore<T>
-		implements ReduceFunction<Tuple2<T, DoubleValue>> {
-		@Override
-		public Tuple2<T, DoubleValue> reduce(Tuple2<T, DoubleValue> left, Tuple2<T, DoubleValue> right)
-			throws Exception {
-			left.f1.setValue(left.f1.getValue() + right.f1.getValue());
-			return left;
-		}
-	}
-}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/link_analysis/HITS.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/link_analysis/HITS.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/link_analysis/HITS.java
deleted file mode 100644
index ba1ab21..0000000
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/link_analysis/HITS.java
+++ /dev/null
@@ -1,582 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.graph.library.link_analysis;
-
-import org.apache.flink.api.common.aggregators.ConvergenceCriterion;
-import org.apache.flink.api.common.aggregators.DoubleSumAggregator;
-import org.apache.flink.api.common.functions.CoGroupFunction;
-import org.apache.flink.api.common.functions.MapFunction;
-import org.apache.flink.api.common.functions.ReduceFunction;
-import org.apache.flink.api.common.functions.RichJoinFunction;
-import org.apache.flink.api.common.operators.base.JoinOperatorBase.JoinHint;
-import org.apache.flink.api.common.operators.base.ReduceOperatorBase.CombineHint;
-import org.apache.flink.api.java.DataSet;
-import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFields;
-import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFieldsFirst;
-import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFieldsSecond;
-import org.apache.flink.api.java.operators.IterativeDataSet;
-import org.apache.flink.api.java.tuple.Tuple2;
-import org.apache.flink.api.java.tuple.Tuple3;
-import org.apache.flink.configuration.Configuration;
-import org.apache.flink.graph.Edge;
-import org.apache.flink.graph.Graph;
-import org.apache.flink.graph.asm.result.PrintableResult;
-import org.apache.flink.graph.asm.result.UnaryResult;
-import org.apache.flink.graph.library.link_analysis.Functions.SumScore;
-import org.apache.flink.graph.library.link_analysis.HITS.Result;
-import org.apache.flink.graph.utils.Murmur3_32;
-import org.apache.flink.graph.utils.proxy.GraphAlgorithmWrappingDataSet;
-import org.apache.flink.types.DoubleValue;
-import org.apache.flink.util.Collector;
-import org.apache.flink.util.Preconditions;
-
-import java.util.Collection;
-
-import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
-
-/**
- * Hyperlink-Induced Topic Search computes two interdependent scores for every
- * vertex in a directed graph. A good "hub" links to good "authorities" and
- * good "authorities" are linked from good "hubs".
- * <p>
- * This algorithm can be configured to terminate either by a limit on the number
- * of iterations, a convergence threshold, or both.
- * <p>
- * http://www.cs.cornell.edu/home/kleinber/auth.pdf
- *
- * @param <K> graph ID type
- * @param <VV> vertex value type
- * @param <EV> edge value type
- */
-public class HITS<K, VV, EV>
-extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
-
-	private static final String CHANGE_IN_SCORES = "change in scores";
-
-	private static final String HUBBINESS_SUM_SQUARED = "hubbiness sum squared";
-
-	private static final String AUTHORITY_SUM_SQUARED = "authority sum squared";
-
-	// Required configuration
-	private int maxIterations;
-
-	private double convergenceThreshold;
-
-	// Optional configuration
-	private int parallelism = PARALLELISM_DEFAULT;
-
-	/**
-	 * Hyperlink-Induced Topic Search with a fixed number of iterations.
-	 *
-	 * @param iterations fixed number of iterations
-	 */
-	public HITS(int iterations) {
-		this(iterations, Double.MAX_VALUE);
-	}
-
-	/**
-	 * Hyperlink-Induced Topic Search with a convergence threshold. The algorithm
-	 * terminates when the total change in hub and authority scores over all
-	 * vertices falls to or below the given threshold value.
-	 *
-	 * @param convergenceThreshold convergence threshold for sum of scores
-	 */
-	public HITS(double convergenceThreshold) {
-		this(Integer.MAX_VALUE, convergenceThreshold);
-	}
-
-	/**
-	 * Hyperlink-Induced Topic Search with a convergence threshold and a maximum
-	 * iteration count. The algorithm terminates after either the given number
-	 * of iterations or when the total change in hub and authority scores over all
-	 * vertices falls to or below the given threshold value.
-	 *
-	 * @param maxIterations maximum number of iterations
-	 * @param convergenceThreshold convergence threshold for sum of scores
-	 */
-	public HITS(int maxIterations, double convergenceThreshold) {
-		Preconditions.checkArgument(maxIterations > 0, "Number of iterations must be greater than zero");
-		Preconditions.checkArgument(convergenceThreshold > 0.0, "Convergence threshold must be greater than zero");
-
-		this.maxIterations = maxIterations;
-		this.convergenceThreshold = convergenceThreshold;
-	}
-
-	/**
-	 * Override the operator parallelism.
-	 *
-	 * @param parallelism operator parallelism
-	 * @return this
-	 */
-	public HITS<K, VV, EV> setParallelism(int parallelism) {
-		this.parallelism = parallelism;
-
-		return this;
-	}
-
-	@Override
-	protected String getAlgorithmName() {
-		return HITS.class.getName();
-	}
-
-	@Override
-	protected boolean mergeConfiguration(GraphAlgorithmWrappingDataSet other) {
-		Preconditions.checkNotNull(other);
-
-		if (! HITS.class.isAssignableFrom(other.getClass())) {
-			return false;
-		}
-
-		HITS rhs = (HITS) other;
-
-		// merge configurations
-
-		maxIterations = Math.max(maxIterations, rhs.maxIterations);
-		convergenceThreshold = Math.min(convergenceThreshold, rhs.convergenceThreshold);
-		parallelism = (parallelism == PARALLELISM_DEFAULT) ? rhs.parallelism :
-			((rhs.parallelism == PARALLELISM_DEFAULT) ? parallelism : Math.min(parallelism, rhs.parallelism));
-
-		return true;
-	}
-
-	@Override
-	public DataSet<Result<K>> runInternal(Graph<K, VV, EV> input)
-			throws Exception {
-		DataSet<Tuple2<K, K>> edges = input
-			.getEdges()
-			.map(new ExtractEdgeIDs<K, EV>())
-				.setParallelism(parallelism)
-				.name("Extract edge IDs");
-
-		// ID, hub, authority
-		DataSet<Tuple3<K, DoubleValue, DoubleValue>> initialScores = edges
-			.map(new InitializeScores<K>())
-				.setParallelism(parallelism)
-				.name("Initial scores")
-			.groupBy(0)
-			.reduce(new SumScores<K>())
-			.setCombineHint(CombineHint.HASH)
-				.setParallelism(parallelism)
-				.name("Sum");
-
-		IterativeDataSet<Tuple3<K, DoubleValue, DoubleValue>> iterative = initialScores
-			.iterate(maxIterations);
-
-		// ID, hubbiness
-		DataSet<Tuple2<K, DoubleValue>> hubbiness = iterative
-			.coGroup(edges)
-			.where(0)
-			.equalTo(1)
-			.with(new Hubbiness<K>())
-				.setParallelism(parallelism)
-				.name("Hub")
-			.groupBy(0)
-			.reduce(new SumScore<K>())
-			.setCombineHint(CombineHint.HASH)
-				.setParallelism(parallelism)
-				.name("Sum");
-
-		// sum-of-hubbiness-squared
-		DataSet<DoubleValue> hubbinessSumSquared = hubbiness
-			.map(new Square<K>())
-				.setParallelism(parallelism)
-				.name("Square")
-			.reduce(new Sum())
-			.setCombineHint(CombineHint.HASH)
-				.setParallelism(parallelism)
-				.name("Sum");
-
-		// ID, new authority
-		DataSet<Tuple2<K, DoubleValue>> authority = hubbiness
-			.coGroup(edges)
-			.where(0)
-			.equalTo(0)
-			.with(new Authority<K>())
-				.setParallelism(parallelism)
-				.name("Authority")
-			.groupBy(0)
-			.reduce(new SumScore<K>())
-			.setCombineHint(CombineHint.HASH)
-				.setParallelism(parallelism)
-				.name("Sum");
-
-		// sum-of-authority-squared
-		DataSet<DoubleValue> authoritySumSquared = authority
-			.map(new Square<K>())
-				.setParallelism(parallelism)
-				.name("Square")
-			.reduce(new Sum())
-			.setCombineHint(CombineHint.HASH)
-				.setParallelism(parallelism)
-				.name("Sum");
-
-		// ID, normalized hubbiness, normalized authority
-		DataSet<Tuple3<K, DoubleValue, DoubleValue>> scores = hubbiness
-			.fullOuterJoin(authority, JoinHint.REPARTITION_SORT_MERGE)
-			.where(0)
-			.equalTo(0)
-			.with(new JoinAndNormalizeHubAndAuthority<K>())
-			.withBroadcastSet(hubbinessSumSquared, HUBBINESS_SUM_SQUARED)
-			.withBroadcastSet(authoritySumSquared, AUTHORITY_SUM_SQUARED)
-				.setParallelism(parallelism)
-				.name("Join scores");
-
-		DataSet<Tuple3<K, DoubleValue, DoubleValue>> passThrough;
-
-		if (convergenceThreshold < Double.MAX_VALUE) {
-			passThrough = iterative
-				.fullOuterJoin(scores, JoinHint.REPARTITION_SORT_MERGE)
-				.where(0)
-				.equalTo(0)
-				.with(new ChangeInScores<K>())
-					.setParallelism(parallelism)
-					.name("Change in scores");
-
-			iterative.registerAggregationConvergenceCriterion(CHANGE_IN_SCORES, new DoubleSumAggregator(), new ScoreConvergence(convergenceThreshold));
-		} else {
-			passThrough = scores;
-		}
-
-		return iterative
-			.closeWith(passThrough)
-			.map(new TranslateResult<K>())
-				.setParallelism(parallelism)
-				.name("Map result");
-	}
-
-	/**
-	 * Map edges and remove the edge value.
-	 *
-	 * @param <T> ID type
-	 * @param <ET> edge value type
-	 *
-	 * @see Graph.ExtractEdgeIDsMapper
-	 */
-	@ForwardedFields("0; 1")
-	private static class ExtractEdgeIDs<T, ET>
-	implements MapFunction<Edge<T, ET>, Tuple2<T, T>> {
-		private Tuple2<T, T> output = new Tuple2<>();
-
-		@Override
-		public Tuple2<T, T> map(Edge<T, ET> value)
-				throws Exception {
-			output.f0 = value.f0;
-			output.f1 = value.f1;
-			return output;
-		}
-	}
-
-	/**
-	 * Initialize vertices' authority scores by assigning each vertex with an
-	 * initial hub score of 1.0. The hub scores are initialized to zero since
-	 * these will be computed based on the initial authority scores.
-	 *
-	 * The initial scores are non-normalized.
-	 *
-	 * @param <T> ID type
-	 */
-	@ForwardedFields("1->0")
-	private static class InitializeScores<T>
-	implements MapFunction<Tuple2<T, T>, Tuple3<T, DoubleValue, DoubleValue>> {
-		private Tuple3<T, DoubleValue, DoubleValue> output = new Tuple3<>(null, new DoubleValue(0.0), new DoubleValue(1.0));
-
-		@Override
-		public Tuple3<T, DoubleValue, DoubleValue> map(Tuple2<T, T> value) throws Exception {
-			output.f0 = value.f1;
-			return output;
-		}
-	}
-
-	/**
-	 * Sum vertices' hub and authority scores.
-	 *
-	 * @param <T> ID type
-	 */
-	@ForwardedFields("0")
-	private static class SumScores<T>
-	implements ReduceFunction<Tuple3<T, DoubleValue, DoubleValue>> {
-		@Override
-		public Tuple3<T, DoubleValue, DoubleValue> reduce(Tuple3<T, DoubleValue, DoubleValue> left, Tuple3<T, DoubleValue, DoubleValue> right)
-				throws Exception {
-			left.f1.setValue(left.f1.getValue() + right.f1.getValue());
-			left.f2.setValue(left.f2.getValue() + right.f2.getValue());
-			return left;
-		}
-	}
-
-	/**
-	 * The hub score is the sum of authority scores of vertices on out-edges.
-	 *
-	 * @param <T> ID type
-	 */
-	@ForwardedFieldsFirst("2->1")
-	@ForwardedFieldsSecond("0")
-	private static class Hubbiness<T>
-	implements CoGroupFunction<Tuple3<T, DoubleValue, DoubleValue>, Tuple2<T, T>, Tuple2<T, DoubleValue>> {
-		private Tuple2<T, DoubleValue> output = new Tuple2<>();
-
-		@Override
-		public void coGroup(Iterable<Tuple3<T, DoubleValue, DoubleValue>> vertex, Iterable<Tuple2<T, T>> edges, Collector<Tuple2<T, DoubleValue>> out)
-				throws Exception {
-			output.f1 = vertex.iterator().next().f2;
-
-			for (Tuple2<T, T> edge : edges) {
-				output.f0 = edge.f0;
-				out.collect(output);
-			}
-		}
-	}
-
-	/**
-	 * The authority score is the sum of hub scores of vertices on in-edges.
-	 *
-	 * @param <T> ID type
-	 */
-	@ForwardedFieldsFirst("1")
-	@ForwardedFieldsSecond("1->0")
-	private static class Authority<T>
-	implements CoGroupFunction<Tuple2<T, DoubleValue>, Tuple2<T, T>, Tuple2<T, DoubleValue>> {
-		private Tuple2<T, DoubleValue> output = new Tuple2<>();
-
-		@Override
-		public void coGroup(Iterable<Tuple2<T, DoubleValue>> vertex, Iterable<Tuple2<T, T>> edges, Collector<Tuple2<T, DoubleValue>> out)
-				throws Exception {
-			output.f1 = vertex.iterator().next().f1;
-
-			for (Tuple2<T, T> edge : edges) {
-				output.f0 = edge.f1;
-				out.collect(output);
-			}
-		}
-	}
-
-	/**
-	 * Compute the square of each score.
-	 *
-	 * @param <T> ID type
-	 */
-	private static class Square<T>
-	implements MapFunction<Tuple2<T, DoubleValue>, DoubleValue> {
-		private DoubleValue output = new DoubleValue();
-
-		@Override
-		public DoubleValue map(Tuple2<T, DoubleValue> value)
-				throws Exception {
-			double val = value.f1.getValue();
-			output.setValue(val * val);
-
-			return output;
-		}
-	}
-
-	/**
-	 * Sum over values. This specialized function is used in place of generic aggregation.
-	 */
-	private static class Sum
-	implements ReduceFunction<DoubleValue> {
-		@Override
-		public DoubleValue reduce(DoubleValue first, DoubleValue second)
-				throws Exception {
-			first.setValue(first.getValue() + second.getValue());
-			return first;
-		}
-	}
-
-	/**
-	 * Join and normalize the hub and authority scores.
-	 *
-	 * @param <T> ID type
-	 */
-	@ForwardedFieldsFirst("0")
-	@ForwardedFieldsSecond("0")
-	private static class JoinAndNormalizeHubAndAuthority<T>
-	extends RichJoinFunction<Tuple2<T, DoubleValue>, Tuple2<T, DoubleValue>, Tuple3<T, DoubleValue, DoubleValue>> {
-		private Tuple3<T, DoubleValue, DoubleValue> output = new Tuple3<>(null, new DoubleValue(), new DoubleValue());
-
-		private double hubbinessRootSumSquared;
-
-		private double authorityRootSumSquared;
-
-		@Override
-		public void open(Configuration parameters) throws Exception {
-			super.open(parameters);
-
-			Collection<DoubleValue> var;
-			var = getRuntimeContext().getBroadcastVariable(HUBBINESS_SUM_SQUARED);
-			hubbinessRootSumSquared = Math.sqrt(var.iterator().next().getValue());
-
-			var = getRuntimeContext().getBroadcastVariable(AUTHORITY_SUM_SQUARED);
-			authorityRootSumSquared = Math.sqrt(var.iterator().next().getValue());
-		}
-
-		@Override
-		public Tuple3<T, DoubleValue, DoubleValue> join(Tuple2<T, DoubleValue> hubbiness, Tuple2<T, DoubleValue> authority)
-				throws Exception {
-			output.f0 = (authority == null) ? hubbiness.f0 : authority.f0;
-			output.f1.setValue(hubbiness == null ? 0.0 : hubbiness.f1.getValue() / hubbinessRootSumSquared);
-			output.f2.setValue(authority == null ? 0.0 : authority.f1.getValue() / authorityRootSumSquared);
-			return output;
-		}
-	}
-
-	/**
-	 * Computes the total sum of the change in hub and authority scores over
-	 * all vertices between iterations. A negative score is emitted after the
-	 * first iteration to prevent premature convergence.
-	 *
-	 * @param <T> ID type
-	 */
-	@ForwardedFieldsFirst("0")
-	@ForwardedFieldsSecond("*")
-	private static class ChangeInScores<T>
-	extends RichJoinFunction<Tuple3<T, DoubleValue, DoubleValue>, Tuple3<T, DoubleValue, DoubleValue>, Tuple3<T, DoubleValue, DoubleValue>> {
-		private boolean isInitialSuperstep;
-
-		private double changeInScores;
-
-		@Override
-		public void open(Configuration parameters)
-				throws Exception {
-			super.open(parameters);
-
-			isInitialSuperstep = (getIterationRuntimeContext().getSuperstepNumber() == 1);
-			changeInScores = (isInitialSuperstep) ? -1.0 : 0.0;
-		}
-
-		@Override
-		public void close()
-				throws Exception {
-			super.close();
-
-			DoubleSumAggregator agg = getIterationRuntimeContext().getIterationAggregator(CHANGE_IN_SCORES);
-			agg.aggregate(changeInScores);
-		}
-
-		@Override
-		public Tuple3<T, DoubleValue, DoubleValue> join(Tuple3<T, DoubleValue, DoubleValue> first, Tuple3<T, DoubleValue, DoubleValue> second)
-				throws Exception {
-			if (! isInitialSuperstep) {
-				changeInScores += Math.abs(second.f1.getValue() - first.f1.getValue());
-				changeInScores += Math.abs(second.f2.getValue() - first.f2.getValue());
-			}
-
-			return second;
-		}
-	}
-
-	/**
-	 * Monitors the total change in hub and authority scores over all vertices.
-	 * The algorithm terminates when the change in scores compared against the
-	 * prior iteration falls to or below the given convergence threshold.
-	 *
-	 * An optimization of this implementation of HITS is to leave the initial
-	 * scores non-normalized; therefore, the change in scores after the first
-	 * superstep cannot be measured and a negative value is emitted to signal
-	 * that the iteration should continue.
-	 */
-	private static class ScoreConvergence
-	implements ConvergenceCriterion<DoubleValue> {
-		private double convergenceThreshold;
-
-		public ScoreConvergence(double convergenceThreshold) {
-			this.convergenceThreshold = convergenceThreshold;
-		}
-
-		@Override
-		public boolean isConverged(int iteration, DoubleValue value) {
-			double val = value.getValue();
-			return (0 <= val && val <= convergenceThreshold);
-		}
-	}
-
-	/**
-	 * Map the Tuple result to the return type.
-	 *
-	 * @param <T> ID type
-	 */
-	@ForwardedFields("0; 1; 2")
-	private static class TranslateResult<T>
-	implements MapFunction<Tuple3<T, DoubleValue, DoubleValue>, Result<T>> {
-		private Result<T> output = new Result<>();
-
-		@Override
-		public Result<T> map(Tuple3<T, DoubleValue, DoubleValue> value) throws Exception {
-			output.f0 = value.f0;
-			output.f1 = value.f1;
-			output.f2 = value.f2;
-			return output;
-		}
-	}
-
-	/**
-	 * Wraps the {@link Tuple3} to encapsulate results from the HITS algorithm.
-	 *
-	 * @param <T> ID type
-	 */
-	public static class Result<T>
-	extends Tuple3<T, DoubleValue, DoubleValue>
-	implements PrintableResult, UnaryResult<T> {
-		public static final int HASH_SEED = 0xc7e39a63;
-
-		private Murmur3_32 hasher = new Murmur3_32(HASH_SEED);
-
-		@Override
-		public T getVertexId0() {
-			return f0;
-		}
-
-		@Override
-		public void setVertexId0(T value) {
-			f0 = value;
-		}
-
-		/**
-		 * Get the hub score. Good hubs link to good authorities.
-		 *
-		 * @return the hub score
-		 */
-		public DoubleValue getHubScore() {
-			return f1;
-		}
-
-		/**
-		 * Get the authority score. Good authorities link to good hubs.
-		 *
-		 * @return the authority score
-		 */
-		public DoubleValue getAuthorityScore() {
-			return f2;
-		}
-
-		public String toPrintableString() {
-			return "Vertex ID: " + getVertexId0()
-				+ ", hub score: " + getHubScore()
-				+ ", authority score: " + getAuthorityScore();
-		}
-
-		@Override
-		public int hashCode() {
-			return hasher.reset()
-				.hash(f0.hashCode())
-				.hash(f1.getValue())
-				.hash(f2.getValue())
-				.hash();
-		}
-	}
-}

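For reference, the convergence bookkeeping in the HITS implementation deleted above pairs ChangeInScores, which aggregates the absolute per-vertex deltas of the hub and authority scores, with ScoreConvergence, which halts the iteration once that aggregate falls to or below the threshold while treating the negative first-superstep sentinel as "not converged". The following is a minimal plain-Java sketch of that logic, not code from the commit; the scores and threshold are made-up example inputs.

public class HitsConvergenceSketch {

	// Mirrors ScoreConvergence.isConverged(): a negative aggregate, emitted
	// after the first (non-measurable) superstep, never signals convergence.
	static boolean isConverged(double changeInScores, double threshold) {
		return 0 <= changeInScores && changeInScores <= threshold;
	}

	public static void main(String[] args) {
		double threshold = 1e-3;

		// hub/authority scores of three vertices before and after an iteration
		double[][] before = {{0.50, 0.30}, {0.25, 0.40}, {0.25, 0.30}};
		double[][] after  = {{0.51, 0.29}, {0.24, 0.41}, {0.25, 0.30}};

		// Mirrors ChangeInScores.join(): sum the absolute per-vertex deltas of
		// both scores, as accumulated into the CHANGE_IN_SCORES aggregator.
		double changeInScores = 0.0;
		for (int i = 0; i < before.length; i++) {
			changeInScores += Math.abs(after[i][0] - before[i][0]);
			changeInScores += Math.abs(after[i][1] - before[i][1]);
		}

		// the summed change (~0.04) exceeds the threshold, so not converged
		System.out.println("change = " + changeInScores
			+ ", converged = " + isConverged(changeInScores, threshold));
	}
}
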
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/link_analysis/PageRank.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/link_analysis/PageRank.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/link_analysis/PageRank.java
deleted file mode 100644
index 747735e..0000000
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/link_analysis/PageRank.java
+++ /dev/null
@@ -1,544 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.graph.library.link_analysis;
-
-import org.apache.flink.api.common.aggregators.ConvergenceCriterion;
-import org.apache.flink.api.common.aggregators.DoubleSumAggregator;
-import org.apache.flink.api.common.functions.CoGroupFunction;
-import org.apache.flink.api.common.functions.FlatMapFunction;
-import org.apache.flink.api.common.functions.MapFunction;
-import org.apache.flink.api.common.functions.ReduceFunction;
-import org.apache.flink.api.common.functions.RichJoinFunction;
-import org.apache.flink.api.common.functions.RichMapFunction;
-import org.apache.flink.api.common.operators.base.ReduceOperatorBase.CombineHint;
-import org.apache.flink.api.java.DataSet;
-import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFields;
-import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFieldsFirst;
-import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFieldsSecond;
-import org.apache.flink.api.java.operators.IterativeDataSet;
-import org.apache.flink.api.java.tuple.Tuple2;
-import org.apache.flink.configuration.Configuration;
-import org.apache.flink.graph.Edge;
-import org.apache.flink.graph.Graph;
-import org.apache.flink.graph.Vertex;
-import org.apache.flink.graph.asm.degree.annotate.directed.EdgeSourceDegrees;
-import org.apache.flink.graph.asm.degree.annotate.directed.VertexDegrees;
-import org.apache.flink.graph.asm.degree.annotate.directed.VertexDegrees.Degrees;
-import org.apache.flink.graph.asm.result.PrintableResult;
-import org.apache.flink.graph.asm.result.UnaryResult;
-import org.apache.flink.graph.library.link_analysis.Functions.SumScore;
-import org.apache.flink.graph.library.link_analysis.PageRank.Result;
-import org.apache.flink.graph.utils.GraphUtils;
-import org.apache.flink.graph.utils.Murmur3_32;
-import org.apache.flink.graph.utils.proxy.GraphAlgorithmWrappingDataSet;
-import org.apache.flink.types.DoubleValue;
-import org.apache.flink.types.LongValue;
-import org.apache.flink.util.Collector;
-import org.apache.flink.util.Preconditions;
-
-import java.util.Collection;
-import java.util.Iterator;
-
-import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
-
-/**
- * PageRank computes a per-vertex score which is the sum of PageRank scores
- * transmitted over in-edges. Each vertex's score is divided evenly among
- * out-edges. High-scoring vertices are linked to by other high-scoring
- * vertices; this is similar to the 'authority' score in {@link HITS}.
- *
- * http://ilpubs.stanford.edu:8090/422/1/1999-66.pdf
- *
- * @param <K> graph ID type
- * @param <VV> vertex value type
- * @param <EV> edge value type
- */
-public class PageRank<K, VV, EV>
-extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
-
-	private static final String VERTEX_COUNT = "vertex count";
-
-	private static final String SUM_OF_SCORES = "sum of scores";
-
-	private static final String CHANGE_IN_SCORES = "change in scores";
-
-	// Required configuration
-	private final double dampingFactor;
-
-	private int maxIterations;
-
-	private double convergenceThreshold;
-
-	// Optional configuration
-	private int parallelism = PARALLELISM_DEFAULT;
-
-	/**
-	 * PageRank with a fixed number of iterations.
-	 *
-	 * @param dampingFactor probability of following an out-link, otherwise jump to a random vertex
-	 * @param iterations fixed number of iterations
-	 */
-	public PageRank(double dampingFactor, int iterations) {
-		this(dampingFactor, iterations, Double.MAX_VALUE);
-	}
-
-	/**
-	 * PageRank with a convergence threshold. The algorithm terminates when the
-	 * change in score over all vertices falls to or below the given threshold value.
-	 *
-	 * @param dampingFactor probability of following an out-link, otherwise jump to a random vertex
-	 * @param convergenceThreshold convergence threshold for sum of scores
-	 */
-	public PageRank(double dampingFactor, double convergenceThreshold) {
-		this(dampingFactor, Integer.MAX_VALUE, convergenceThreshold);
-	}
-
-	/**
-	 * PageRank with a convergence threshold and a maximum iteration count. The
-	 * algorithm terminates after either the given number of iterations or when
-	 * the change in score over all vertices falls to or below the given
-	 * threshold value.
-	 *
-	 * @param dampingFactor probability of following an out-link, otherwise jump to a random vertex
-	 * @param maxIterations maximum number of iterations
-	 * @param convergenceThreshold convergence threshold for sum of scores
-	 */
-	public PageRank(double dampingFactor, int maxIterations, double convergenceThreshold) {
-		Preconditions.checkArgument(0 < dampingFactor && dampingFactor < 1,
-			"Damping factor must be between zero and one");
-		Preconditions.checkArgument(maxIterations > 0, "Number of iterations must be greater than zero");
-		Preconditions.checkArgument(convergenceThreshold > 0.0, "Convergence threshold must be greater than zero");
-
-		this.dampingFactor = dampingFactor;
-		this.maxIterations = maxIterations;
-		this.convergenceThreshold = convergenceThreshold;
-	}
-
-	/**
-	 * Override the operator parallelism.
-	 *
-	 * @param parallelism operator parallelism
-	 * @return this
-	 */
-	public PageRank<K, VV, EV> setParallelism(int parallelism) {
-		this.parallelism = parallelism;
-
-		return this;
-	}
-
-	@Override
-	protected String getAlgorithmName() {
-		return PageRank.class.getName();
-	}
-
-	@Override
-	protected boolean mergeConfiguration(GraphAlgorithmWrappingDataSet other) {
-		Preconditions.checkNotNull(other);
-
-		if (! PageRank.class.isAssignableFrom(other.getClass())) {
-			return false;
-		}
-
-		PageRank rhs = (PageRank) other;
-
-		// merge configurations
-
-		maxIterations = Math.max(maxIterations, rhs.maxIterations);
-		convergenceThreshold = Math.min(convergenceThreshold, rhs.convergenceThreshold);
-		parallelism = (parallelism == PARALLELISM_DEFAULT) ? rhs.parallelism :
-			((rhs.parallelism == PARALLELISM_DEFAULT) ? parallelism : Math.min(parallelism, rhs.parallelism));
-
-		return true;
-	}
-
-	@Override
-	public DataSet<Result<K>> runInternal(Graph<K, VV, EV> input)
-			throws Exception {
-		// vertex degree
-		DataSet<Vertex<K, Degrees>> vertexDegree = input
-			.run(new VertexDegrees<K, VV, EV>()
-				.setParallelism(parallelism));
-
-		// vertex count
-		DataSet<LongValue> vertexCount = GraphUtils.count(vertexDegree);
-
-		// s, t, d(s)
-		DataSet<Edge<K, LongValue>> edgeSourceDegree = input
-			.run(new EdgeSourceDegrees<K, VV, EV>()
-				.setParallelism(parallelism))
-			.map(new ExtractSourceDegree<K, EV>())
-				.setParallelism(parallelism)
-				.name("Extract source degree");
-
-		// vertices with zero in-edges
-		DataSet<Tuple2<K, DoubleValue>> sourceVertices = vertexDegree
-			.flatMap(new InitializeSourceVertices<K>())
-			.withBroadcastSet(vertexCount, VERTEX_COUNT)
-				.setParallelism(parallelism)
-				.name("Initialize source vertex scores");
-
-		// s, initial pagerank(s)
-		DataSet<Tuple2<K, DoubleValue>> initialScores = vertexDegree
-			.map(new InitializeVertexScores<K>())
-			.withBroadcastSet(vertexCount, VERTEX_COUNT)
-				.setParallelism(parallelism)
-				.name("Initialize scores");
-
-		IterativeDataSet<Tuple2<K, DoubleValue>> iterative = initialScores
-			.iterate(maxIterations);
-
-		// s, projected pagerank(s)
-		DataSet<Tuple2<K, DoubleValue>> vertexScores = iterative
-			.coGroup(edgeSourceDegree)
-			.where(0)
-			.equalTo(0)
-			.with(new SendScore<K>())
-				.setParallelism(parallelism)
-				.name("Send score")
-			.groupBy(0)
-			.reduce(new SumScore<K>())
-			.setCombineHint(CombineHint.HASH)
-				.setParallelism(parallelism)
-				.name("Sum");
-
-		// ignored ID, total pagerank
-		DataSet<Tuple2<K, DoubleValue>> sumOfScores = vertexScores
-			.reduce(new SumVertexScores<K>())
-				.setParallelism(parallelism)
-				.name("Sum");
-
-		// s, adjusted pagerank(s)
-		DataSet<Tuple2<K, DoubleValue>> adjustedScores = vertexScores
-			.union(sourceVertices)
-				.setParallelism(parallelism)
-				.name("Union with source vertices")
-			.map(new AdjustScores<K>(dampingFactor))
-				.withBroadcastSet(sumOfScores, SUM_OF_SCORES)
-				.withBroadcastSet(vertexCount, VERTEX_COUNT)
-					.setParallelism(parallelism)
-					.name("Adjust scores");
-
-		DataSet<Tuple2<K, DoubleValue>> passThrough;
-
-		if (convergenceThreshold < Double.MAX_VALUE) {
-			passThrough = iterative
-				.join(adjustedScores)
-				.where(0)
-				.equalTo(0)
-				.with(new ChangeInScores<K>())
-					.setParallelism(parallelism)
-					.name("Change in scores");
-
-			iterative.registerAggregationConvergenceCriterion(CHANGE_IN_SCORES, new DoubleSumAggregator(), new ScoreConvergence(convergenceThreshold));
-		} else {
-			passThrough = adjustedScores;
-		}
-
-		return iterative
-			.closeWith(passThrough)
-			.map(new TranslateResult<K>())
-				.setParallelism(parallelism)
-				.name("Map result");
-	}
-
-	/**
-	 * Remove the unused original edge value and extract the out-degree.
-	 *
-	 * @param <T> ID type
-	 * @param <ET> edge value type
-	 */
-	@ForwardedFields("0; 1")
-	private static class ExtractSourceDegree<T, ET>
-	implements MapFunction<Edge<T, Tuple2<ET, Degrees>>, Edge<T, LongValue>> {
-		Edge<T, LongValue> output = new Edge<>();
-
-		@Override
-		public Edge<T, LongValue> map(Edge<T, Tuple2<ET, Degrees>> edge)
-				throws Exception {
-			output.f0 = edge.f0;
-			output.f1 = edge.f1;
-			output.f2 = edge.f2.f1.getOutDegree();
-			return output;
-		}
-	}
-
-	/**
-	 * Source vertices have no in-edges so have a projected score of 0.0.
-	 *
-	 * @param <T> ID type
-	 */
-	@ForwardedFields("0")
-	private static class InitializeSourceVertices<T>
-	implements FlatMapFunction<Vertex<T, Degrees>, Tuple2<T, DoubleValue>> {
-		private Tuple2<T, DoubleValue> output = new Tuple2<>(null, new DoubleValue(0.0));
-
-		@Override
-		public void flatMap(Vertex<T, Degrees> vertex, Collector<Tuple2<T, DoubleValue>> out)
-				throws Exception {
-			if (vertex.f1.getInDegree().getValue() == 0) {
-				output.f0 = vertex.f0;
-				out.collect(output);
-			}
-		}
-	}
-
-	/**
-	 * PageRank scores sum to 1.0 so initialize each vertex with the inverse of
-	 * the number of vertices.
-	 *
-	 * @param <T> ID type
-	 */
-	@ForwardedFields("0")
-	private static class InitializeVertexScores<T>
-	extends RichMapFunction<Vertex<T, Degrees>, Tuple2<T, DoubleValue>> {
-		private Tuple2<T, DoubleValue> output = new Tuple2<>();
-
-		@Override
-		public void open(Configuration parameters)
-				throws Exception {
-			super.open(parameters);
-
-			Collection<LongValue> vertexCount = getRuntimeContext().getBroadcastVariable(VERTEX_COUNT);
-			output.f1 = new DoubleValue(1.0 / vertexCount.iterator().next().getValue());
-		}
-
-		@Override
-		public Tuple2<T, DoubleValue> map(Vertex<T, Degrees> vertex)
-				throws Exception {
-			output.f0 = vertex.f0;
-			return output;
-		}
-	}
-
-	/**
-	 * The PageRank score for each vertex is divided evenly and projected to
-	 * neighbors on out-edges.
-	 *
-	 * @param <T> ID type
-	 */
-	@ForwardedFieldsSecond("1->0")
-	private static class SendScore<T>
-	implements CoGroupFunction<Tuple2<T, DoubleValue>, Edge<T, LongValue>, Tuple2<T, DoubleValue>> {
-		private Tuple2<T, DoubleValue> output = new Tuple2<>(null, new DoubleValue());
-
-		@Override
-		public void coGroup(Iterable<Tuple2<T, DoubleValue>> vertex, Iterable<Edge<T, LongValue>> edges, Collector<Tuple2<T, DoubleValue>> out)
-				throws Exception {
-			Iterator<Edge<T, LongValue>> edgeIterator = edges.iterator();
-
-			if (edgeIterator.hasNext()) {
-				Edge<T, LongValue> edge = edgeIterator.next();
-
-				output.f0 = edge.f1;
-				output.f1.setValue(vertex.iterator().next().f1.getValue() / edge.f2.getValue());
-				out.collect(output);
-
-				while (edgeIterator.hasNext()) {
-					edge = edgeIterator.next();
-					output.f0 = edge.f1;
-					out.collect(output);
-				}
-			}
-		}
-	}
-
-	/**
-	 * Sum the PageRank score over all vertices. The vertex ID must be ignored
-	 * but is retained rather than adding another operator.
-	 *
-	 * @param <T> ID type
-	 */
-	@ForwardedFields("0")
-	private static class SumVertexScores<T>
-	implements ReduceFunction<Tuple2<T, DoubleValue>> {
-		@Override
-		public Tuple2<T, DoubleValue> reduce(Tuple2<T, DoubleValue> first, Tuple2<T, DoubleValue> second)
-				throws Exception {
-			first.f1.setValue(first.f1.getValue() + second.f1.getValue());
-			return first;
-		}
-	}
-
-	/**
-	 * Each iteration the per-vertex scores are adjusted with the damping
-	 * factor. Each score is multiplied by the damping factor then added to the
-	 * probability of a "random hop", which is one minus the damping factor.
-	 *
-	 * This operation also accounts for 'sink' vertices, which have no
-	 * out-edges to project score to. The sink scores are computed by taking
-	 * one minus the sum of vertex scores, which also includes precision error.
-	 * This 'missing' score is evenly distributed across vertices as with the
-	 * random hop.
-	 *
-	 * @param <T> ID type
-	 */
-	@ForwardedFields("0")
-	private static class AdjustScores<T>
-	extends RichMapFunction<Tuple2<T, DoubleValue>, Tuple2<T, DoubleValue>> {
-		private double dampingFactor;
-
-		private long vertexCount;
-
-		private double uniformlyDistributedScore;
-
-		public AdjustScores(double dampingFactor) {
-			this.dampingFactor = dampingFactor;
-		}
-
-		@Override
-		public void open(Configuration parameters) throws Exception {
-			super.open(parameters);
-
-			Collection<Tuple2<T, DoubleValue>> sumOfScores = getRuntimeContext().getBroadcastVariable(SUM_OF_SCORES);
-			// floating point precision error is also included in sumOfSinks
-			double sumOfSinks = 1 - sumOfScores.iterator().next().f1.getValue();
-
-			Collection<LongValue> vertexCount = getRuntimeContext().getBroadcastVariable(VERTEX_COUNT);
-			this.vertexCount = vertexCount.iterator().next().getValue();
-
-			this.uniformlyDistributedScore = ((1 - dampingFactor) + dampingFactor * sumOfSinks) / this.vertexCount;
-		}
-
-		@Override
-		public Tuple2<T, DoubleValue> map(Tuple2<T, DoubleValue> value) throws Exception {
-			value.f1.setValue(uniformlyDistributedScore + (dampingFactor * value.f1.getValue()));
-			return value;
-		}
-	}
-
-	/**
-	 * Computes the sum of the absolute change in vertex PageRank scores
-	 * between iterations.
-	 *
-	 * @param <T> ID type
-	 */
-	@ForwardedFieldsFirst("0")
-	@ForwardedFieldsSecond("*")
-	private static class ChangeInScores<T>
-	extends RichJoinFunction<Tuple2<T, DoubleValue>, Tuple2<T, DoubleValue>, Tuple2<T, DoubleValue>> {
-		private double changeInScores;
-
-		@Override
-		public void open(Configuration parameters) throws Exception {
-			super.open(parameters);
-
-			changeInScores = 0.0;
-		}
-
-		@Override
-		public void close()
-				throws Exception {
-			super.close();
-
-			DoubleSumAggregator agg = getIterationRuntimeContext().getIterationAggregator(CHANGE_IN_SCORES);
-			agg.aggregate(changeInScores);
-		}
-
-		@Override
-		public Tuple2<T, DoubleValue> join(Tuple2<T, DoubleValue> first, Tuple2<T, DoubleValue> second)
-				throws Exception {
-			changeInScores += Math.abs(second.f1.getValue() - first.f1.getValue());
-			return second;
-		}
-	}
-
-	/**
-	 * Monitors the sum of the absolute change in vertex scores. The algorithm
-	 * terminates when the change in scores compared against the prior iteration
-	 * falls to or below the given convergence threshold.
-	 */
-	private static class ScoreConvergence
-	implements ConvergenceCriterion<DoubleValue> {
-		private double convergenceThreshold;
-
-		public ScoreConvergence(double convergenceThreshold) {
-			this.convergenceThreshold = convergenceThreshold;
-		}
-
-		@Override
-		public boolean isConverged(int iteration, DoubleValue value) {
-			double val = value.getValue();
-			return (val <= convergenceThreshold);
-		}
-	}
-
-	/**
-	 * Map the Tuple result to the return type.
-	 *
-	 * @param <T> ID type
-	 */
-	@ForwardedFields("0; 1")
-	private static class TranslateResult<T>
-		implements MapFunction<Tuple2<T, DoubleValue>, Result<T>> {
-		private Result<T> output = new Result<>();
-
-		@Override
-		public Result<T> map(Tuple2<T, DoubleValue> value) throws Exception {
-			output.f0 = value.f0;
-			output.f1 = value.f1;
-			return output;
-		}
-	}
-
-	/**
-	 * Wraps the {@link Tuple2} to encapsulate results from the PageRank algorithm.
-	 *
-	 * @param <T> ID type
-	 */
-	public static class Result<T>
-	extends Tuple2<T, DoubleValue>
-	implements PrintableResult, UnaryResult<T> {
-		public static final int HASH_SEED = 0x4010af29;
-
-		private Murmur3_32 hasher = new Murmur3_32(HASH_SEED);
-
-		@Override
-		public T getVertexId0() {
-			return f0;
-		}
-
-		@Override
-		public void setVertexId0(T value) {
-			f0 = value;
-		}
-
-		/**
-		 * Get the PageRank score.
-		 *
-		 * @return the PageRank score
-		 */
-		public DoubleValue getPageRankScore() {
-			return f1;
-		}
-
-		@Override
-		public String toPrintableString() {
-			return "Vertex ID: " + getVertexId0()
-				+ ", PageRank score: " + getPageRankScore();
-		}
-
-		@Override
-		public int hashCode() {
-			return hasher.reset()
-				.hash(f0.hashCode())
-				.hash(f1.getValue())
-				.hash();
-		}
-	}
-}

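For reference, the scoring pass in the PageRank implementation deleted above proceeds in two steps: SendScore divides each vertex's score evenly across its out-edges, and AdjustScores then spreads the score not projected by sink vertices (one minus the projected sum, which also absorbs floating point precision error) together with the random-hop probability (1 - dampingFactor) uniformly over all vertices. Below is a plain-Java sketch of that pass, not code from the commit; the adjacency list, damping factor, and iteration count are made-up example inputs.

import java.util.Arrays;

public class PageRankSketch {

	public static void main(String[] args) {
		double d = 0.85;                       // damping factor
		int[][] outEdges = {{1, 2}, {2}, {}};  // vertex 2 is a sink
		int n = outEdges.length;

		// InitializeVertexScores: scores sum to 1.0
		double[] scores = new double[n];
		Arrays.fill(scores, 1.0 / n);

		for (int iteration = 0; iteration < 20; iteration++) {
			// SendScore: project score / out-degree along each out-edge
			double[] projected = new double[n];
			for (int s = 0; s < n; s++) {
				for (int t : outEdges[s]) {
					projected[t] += scores[s] / outEdges[s].length;
				}
			}

			// AdjustScores: redistribute the sink scores and the random-hop
			// probability uniformly, then damp the projected scores
			double sumOfSinks = 1.0 - Arrays.stream(projected).sum();
			double uniform = ((1 - d) + d * sumOfSinks) / n;
			for (int v = 0; v < n; v++) {
				scores[v] = uniform + d * projected[v];
			}
		}

		System.out.println(Arrays.toString(scores)); // sums to ~1.0
	}
}
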
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/linkanalysis/Functions.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/linkanalysis/Functions.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/linkanalysis/Functions.java
new file mode 100644
index 0000000..0fdd46a
--- /dev/null
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/linkanalysis/Functions.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.graph.library.linkanalysis;
+
+import org.apache.flink.api.common.functions.ReduceFunction;
+import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFields;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.types.DoubleValue;
+
+class Functions {
+
+	private Functions() {}
+
+	/**
+	 * Sum vertices' scores.
+	 *
+	 * @param <T> ID type
+	 */
+	@ForwardedFields("0")
+	protected static final class SumScore<T>
+		implements ReduceFunction<Tuple2<T, DoubleValue>> {
+		@Override
+		public Tuple2<T, DoubleValue> reduce(Tuple2<T, DoubleValue> left, Tuple2<T, DoubleValue> right)
+			throws Exception {
+			left.f1.setValue(left.f1.getValue() + right.f1.getValue());
+			return left;
+		}
+	}
+}

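A note on the new SumScore reducer: it follows the common Flink pattern of mutating and returning the left record instead of allocating a result tuple per call, which is safe here because the reduce contract lets the returned record be one of the inputs. Below is a plain-Java sketch of that arithmetic, not code from the commit; it assumes Flink's tuple and value classes on the classpath, and the ID and scores are made-up example inputs.

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.types.DoubleValue;

public class SumScoreSketch {

	public static void main(String[] args) {
		Tuple2<Long, DoubleValue> left = new Tuple2<>(1L, new DoubleValue(0.25));
		Tuple2<Long, DoubleValue> right = new Tuple2<>(1L, new DoubleValue(0.75));

		// same arithmetic as Functions.SumScore.reduce(): update 'left' in
		// place and hand it back as the accumulated record
		left.f1.setValue(left.f1.getValue() + right.f1.getValue());

		System.out.println(left); // (1,1.0)
	}
}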

[03/15] flink git commit: [FLINK-6709] [gelly] Activate strict checkstyle for flink-gellies

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/TestGraphUtils.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/TestGraphUtils.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/TestGraphUtils.java
index 7766723..c160389 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/TestGraphUtils.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/TestGraphUtils.java
@@ -30,6 +30,9 @@ import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
 
+/**
+ * Utility methods and data for testing graph algorithms.
+ */
 public class TestGraphUtils {
 
 	public static DataSet<Vertex<Long, Long>> getLongLongVertexData(
@@ -37,7 +40,7 @@ public class TestGraphUtils {
 
 		return env.fromCollection(getLongLongVertices());
 	}
-	
+
 	public static DataSet<Edge<Long, Long>> getLongLongEdgeData(
 			ExecutionEnvironment env) {
 
@@ -187,7 +190,7 @@ public class TestGraphUtils {
 	}
 
 	/**
-	 * A graph with invalid vertex ids
+	 * A graph with invalid vertex ids.
 	 */
 	public static DataSet<Vertex<Long, Long>> getLongLongInvalidVertexData(
 			ExecutionEnvironment env) {
@@ -200,7 +203,7 @@ public class TestGraphUtils {
 	}
 
 	/**
-	 * A graph that has at least one vertex with no ingoing/outgoing edges
+	 * A graph that has at least one vertex with no ingoing/outgoing edges.
 	 */
 	public static DataSet<Edge<Long, Long>> getLongLongEdgeDataWithZeroDegree(
 			ExecutionEnvironment env) {
@@ -216,7 +219,7 @@ public class TestGraphUtils {
 	}
 
 	/**
-	 * Function that produces an ArrayList of vertices
+	 * Function that produces an ArrayList of vertices.
 	 */
 	public static List<Vertex<Long, Long>> getLongLongVertices() {
 		List<Vertex<Long, Long>> vertices = new ArrayList<>();
@@ -246,12 +249,12 @@ public class TestGraphUtils {
 			edges.add(new Edge<>(1L, 3L, 13L));
 			edges.add(new Edge<>(2L, 3L, 23L));
 			edges.add(new Edge<>(4L, 5L, 45L));
-			
+
 			return env.fromCollection(edges);
 		}
-	
+
 	/**
-	 * Function that produces an ArrayList of edges
+	 * Function that produces an ArrayList of edges.
 	 */
 	public static List<Edge<Long, Long>> getLongLongEdges() {
 		List<Edge<Long, Long>> edges = new ArrayList<>();
@@ -262,21 +265,24 @@ public class TestGraphUtils {
 		edges.add(new Edge<>(3L, 5L, 35L));
 		edges.add(new Edge<>(4L, 5L, 45L));
 		edges.add(new Edge<>(5L, 1L, 51L));
-	
+
 		return edges;
 	}
 
+	/**
+	 * Test POJO.
+	 */
 	public static class DummyCustomType implements Serializable {
 		private static final long serialVersionUID = 1L;
-		
+
 		private int intField;
 		private boolean booleanField;
-		
+
 		public DummyCustomType(int intF, boolean boolF) {
 			this.intField = intF;
 			this.booleanField = boolF;
 		}
-		
+
 		public DummyCustomType() {
 			this.intField = 0;
 			this.booleanField = true;
@@ -285,32 +291,37 @@ public class TestGraphUtils {
 		public int getIntField() {
 			return intField;
 		}
-		
+
 		public void setIntField(int intF) {
 			this.intField = intF;
 		}
-		
+
 		public boolean getBooleanField() {
 			return booleanField;
 		}
-		
+
 		@Override
 		public String toString() {
 			return booleanField ? "(T," + intField + ")" : "(F," + intField + ")";
 		}
 	}
-	
+
+	/**
+	 * Generic test POJO.
+	 *
+	 * @param <T> field type
+	 */
 	public static class DummyCustomParameterizedType<T> implements Serializable {
 		private static final long serialVersionUID = 1L;
-		
+
 		private int intField;
 		private T tField;
-		
+
 		public DummyCustomParameterizedType(int intF, T tF) {
 			this.intField = intF;
 			this.tField = tF;
 		}
-		
+
 		public DummyCustomParameterizedType() {
 			this.intField = 0;
 			this.tField = null;
@@ -319,19 +330,19 @@ public class TestGraphUtils {
 		public int getIntField() {
 			return intField;
 		}
-		
+
 		public void setIntField(int intF) {
 			this.intField = intF;
 		}
-		
+
 		public void setTField(T tF) {
 			this.tField = tF;
 		}
-		
+
 		public T getTField() {
 			return tField;
 		}
-		
+
 		@Override
 		public String toString() {
 			return "(" + tField.toString() + "," + intField + ")";
@@ -339,7 +350,7 @@ public class TestGraphUtils {
 	}
 
 	/**
-	 * Method useful for suppressing sysout printing
+	 * Method useful for suppressing sysout printing.
 	 */
 	public static void pipeSystemOutToNull() {
 		System.setOut(new PrintStream(new BlackholeOutputSteam()));
@@ -351,7 +362,8 @@ public class TestGraphUtils {
 	}
 
 	/**
-	 * utils for getting the second graph for the test of method difference();
+	 * utils for getting the second graph for the test of method difference().
+	 *
 	 * @param env - ExecutionEnvironment
 	 */
 	public static DataSet<Edge<Long, Long>> getLongLongEdgeDataDifference(ExecutionEnvironment env) {
@@ -389,4 +401,4 @@ public class TestGraphUtils {
 		edges.add(new Edge<>(6L, 6L, 66L));
 		return edges;
 	}
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/DegreesITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/DegreesITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/DegreesITCase.java
index 3e375b5..d9e15dd 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/DegreesITCase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/DegreesITCase.java
@@ -26,12 +26,17 @@ import org.apache.flink.graph.test.TestGraphUtils;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
 import java.util.List;
 
+/**
+ * Tests for {@link Graph#inDegrees()}, {@link Graph#outDegrees()},
+ * and {@link Graph#getDegrees()}.
+ */
 @RunWith(Parameterized.class)
 public class DegreesITCase extends MultipleProgramsTestBase {
 
@@ -171,4 +176,4 @@ public class DegreesITCase extends MultipleProgramsTestBase {
 
 		compareResultAsTuples(result, expectedResult);
 	}
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/DegreesWithExceptionITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/DegreesWithExceptionITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/DegreesWithExceptionITCase.java
index 111d421..8b726f4 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/DegreesWithExceptionITCase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/DegreesWithExceptionITCase.java
@@ -21,28 +21,30 @@ package org.apache.flink.graph.test.operations;
 import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.api.java.io.DiscardingOutputFormat;
 import org.apache.flink.api.java.tuple.Tuple2;
-
 import org.apache.flink.configuration.ConfigConstants;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.test.TestGraphUtils;
 import org.apache.flink.runtime.minicluster.LocalFlinkMiniCluster;
-
 import org.apache.flink.test.util.TestEnvironment;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.util.TestLogger;
+
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.fail;
 
+/**
+ * Test expected errors for {@link Graph#inDegrees()},
+ * {@link Graph#outDegrees()}, and {@link Graph#getDegrees()}.
+ */
 public class DegreesWithExceptionITCase extends TestLogger {
 
 	private static final int PARALLELISM = 4;
 
 	private static LocalFlinkMiniCluster cluster;
-	
 
 	@BeforeClass
 	public static void setupCluster() {
@@ -62,7 +64,7 @@ public class DegreesWithExceptionITCase extends TestLogger {
 	}
 
 	/**
-	 * Test outDegrees() with an edge having a srcId that does not exist in the vertex DataSet
+	 * Test outDegrees() with an edge having a srcId that does not exist in the vertex DataSet.
 	 */
 	@Test
 	public void testOutDegreesInvalidEdgeSrcId() throws Exception {
@@ -70,7 +72,7 @@ public class DegreesWithExceptionITCase extends TestLogger {
 		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
 		env.setParallelism(PARALLELISM);
 		env.getConfig().disableSysoutLogging();
-		
+
 		Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
 				TestGraphUtils.getLongLongEdgeInvalidSrcData(env), env);
 
@@ -85,7 +87,7 @@ public class DegreesWithExceptionITCase extends TestLogger {
 	}
 
 	/**
-	 * Test inDegrees() with an edge having a trgId that does not exist in the vertex DataSet
+	 * Test inDegrees() with an edge having a trgId that does not exist in the vertex DataSet.
 	 */
 	@Test
 	public void testInDegreesInvalidEdgeTrgId() throws Exception {
@@ -108,7 +110,7 @@ public class DegreesWithExceptionITCase extends TestLogger {
 	}
 
 	/**
-	 * Test getDegrees() with an edge having a trgId that does not exist in the vertex DataSet
+	 * Test getDegrees() with an edge having a trgId that does not exist in the vertex DataSet.
 	 */
 	@Test
 	public void testGetDegreesInvalidEdgeTrgId() throws Exception {
@@ -131,7 +133,7 @@ public class DegreesWithExceptionITCase extends TestLogger {
 	}
 
 	/**
-	 * Test getDegrees() with an edge having a srcId that does not exist in the vertex DataSet
+	 * Test getDegrees() with an edge having a srcId that does not exist in the vertex DataSet.
 	 */
 	@Test
 	public void testGetDegreesInvalidEdgeSrcId() throws Exception {
@@ -154,7 +156,7 @@ public class DegreesWithExceptionITCase extends TestLogger {
 	}
 
 	/**
-	 * Test getDegrees() with an edge having a srcId and a trgId that does not exist in the vertex DataSet
+	 * Test getDegrees() with an edge having a srcId and a trgId that does not exist in the vertex DataSet.
 	 */
 	@Test
 	public void testGetDegreesInvalidEdgeSrcTrgId() throws Exception {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/FromCollectionITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/FromCollectionITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/FromCollectionITCase.java
index ab2ffe0..7995bae 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/FromCollectionITCase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/FromCollectionITCase.java
@@ -27,12 +27,16 @@ import org.apache.flink.graph.Vertex;
 import org.apache.flink.graph.test.TestGraphUtils;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
 import java.util.List;
 
+/**
+ * Test creating graphs from collections.
+ */
 @RunWith(Parameterized.class)
 public class FromCollectionITCase extends MultipleProgramsTestBase {
 
@@ -74,7 +78,6 @@ public class FromCollectionITCase extends MultipleProgramsTestBase {
 		Graph<Long, NullValue, Long> graph = Graph.fromCollection(TestGraphUtils.getLongLongEdges(),
 			env);
 
-
 		DataSet<Vertex<Long, NullValue>> data = graph.getVertices();
 		List<Vertex<Long, NullValue>> result = data.collect();
 
@@ -115,4 +118,4 @@ public class FromCollectionITCase extends MultipleProgramsTestBase {
 			return vertexId * 2;
 		}
 	}
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphCreationITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphCreationITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphCreationITCase.java
index 77dd4d1..991a420 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphCreationITCase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphCreationITCase.java
@@ -30,6 +30,7 @@ import org.apache.flink.graph.test.TestGraphUtils.DummyCustomParameterizedType;
 import org.apache.flink.graph.validation.InvalidVertexIdsValidator;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -37,6 +38,9 @@ import org.junit.runners.Parameterized;
 import java.util.LinkedList;
 import java.util.List;
 
+/**
+ * Test graph creation and validation from datasets and tuples.
+ */
 @RunWith(Parameterized.class)
 public class GraphCreationITCase extends MultipleProgramsTestBase {
 
@@ -122,7 +126,7 @@ public class GraphCreationITCase extends MultipleProgramsTestBase {
 
 		//env.fromElements(result).writeAsText(resultPath);
 
-		String res = valid.toString();//env.fromElements(valid);
+		String res = valid.toString(); //env.fromElements(valid);
 		List<String> result = new LinkedList<>();
 		result.add(res);
 		expectedResult = "true";
@@ -142,7 +146,7 @@ public class GraphCreationITCase extends MultipleProgramsTestBase {
 		Graph<Long, Long, Long> graph = Graph.fromDataSet(vertices, edges, env);
 		Boolean valid = graph.validate(new InvalidVertexIdsValidator<Long, Long, Long>());
 
-		String res = valid.toString();//env.fromElements(valid);
+		String res = valid.toString(); //env.fromElements(valid);
 		List<String> result = new LinkedList<>();
 		result.add(res);
 
@@ -230,4 +234,4 @@ public class GraphCreationITCase extends MultipleProgramsTestBase {
 			return "boo";
 		}
 	}
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphCreationWithCsvITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphCreationWithCsvITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphCreationWithCsvITCase.java
index 812f418..e92e644 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphCreationWithCsvITCase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphCreationWithCsvITCase.java
@@ -26,6 +26,7 @@ import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Triplet;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -37,6 +38,9 @@ import java.io.OutputStreamWriter;
 import java.nio.charset.Charset;
 import java.util.List;
 
+/**
+ * Test graph creation from CSV.
+ */
 @RunWith(Parameterized.class)
 public class GraphCreationWithCsvITCase extends MultipleProgramsTestBase {
 
@@ -52,12 +56,12 @@ public class GraphCreationWithCsvITCase extends MultipleProgramsTestBase {
 		 * Test with two Csv files one with Vertex Data and one with Edges data
 		 */
 		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
-		final String fileContent = "1,1\n"+
-				"2,2\n"+
+		final String fileContent = "1,1\n" +
+				"2,2\n" +
 				"3,3\n";
 		final FileInputSplit split = createTempFile(fileContent);
-		final String fileContent2 = "1,2,ot\n"+
-				"3,2,tt\n"+
+		final String fileContent2 = "1,2,ot\n" +
+				"3,2,tt\n" +
 				"3,1,to\n";
 		final FileInputSplit split2 = createTempFile(fileContent2);
 
@@ -79,11 +83,11 @@ public class GraphCreationWithCsvITCase extends MultipleProgramsTestBase {
 		Test fromCsvReader with edge and vertex path and nullvalue for edge
 		 */
 		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
-		final String vertexFileContent = "1,one\n"+
-				"2,two\n"+
+		final String vertexFileContent = "1,one\n" +
+				"2,two\n" +
 				"3,three\n";
-		final String edgeFileContent = "1,2\n"+
-				"3,2\n"+
+		final String edgeFileContent = "1,2\n" +
+				"3,2\n" +
 				"3,1\n";
 		final FileInputSplit split = createTempFile(vertexFileContent);
 		final FileInputSplit edgeSplit = createTempFile(edgeFileContent);
@@ -93,8 +97,8 @@ public class GraphCreationWithCsvITCase extends MultipleProgramsTestBase {
 
 		List<Triplet<Long, String, NullValue>> result = graph.getTriplets().collect();
 
-		expectedResult = "1,2,one,two,(null)\n"+
-				"3,2,three,two,(null)\n"+
+		expectedResult = "1,2,one,two,(null)\n" +
+				"3,2,three,two,(null)\n" +
 				"3,1,three,one,(null)\n";
 
 		compareResultAsTuples(result, expectedResult);
@@ -106,8 +110,8 @@ public class GraphCreationWithCsvITCase extends MultipleProgramsTestBase {
 		*Test fromCsvReader with edge path and a mapper that assigns a Double constant as value
 		 */
 		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
-		final String fileContent = "1,2,ot\n"+
-				"3,2,tt\n"+
+		final String fileContent = "1,2,ot\n" +
+				"3,2,tt\n" +
 				"3,1,to\n";
 		final FileInputSplit split = createTempFile(fileContent);
 
@@ -126,12 +130,12 @@ public class GraphCreationWithCsvITCase extends MultipleProgramsTestBase {
 		 * Test with one Csv file one with Edges data. Also tests the configuration method ignoreFistLineEdges()
 		 */
 		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
-		final String fileContent2 = "header\n1,2,ot\n"+
-				"3,2,tt\n"+
+		final String fileContent2 = "header\n1,2,ot\n" +
+				"3,2,tt\n" +
 				"3,1,to\n";
 
 		final FileInputSplit split2 = createTempFile(fileContent2);
-		Graph<Long, NullValue, String> graph= Graph.fromCsvReader(split2.getPath().toString(), env)
+		Graph<Long, NullValue, String> graph = Graph.fromCsvReader(split2.getPath().toString(), env)
 				.ignoreFirstLineEdges()
 				.ignoreCommentsVertices("hi")
 				.edgeTypes(Long.class, String.class);
@@ -153,19 +157,19 @@ public class GraphCreationWithCsvITCase extends MultipleProgramsTestBase {
 		 */
 		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
 
-		final String fileContent = "header\n1;1\n"+
-				"2;2\n"+
+		final String fileContent = "header\n1;1\n" +
+				"2;2\n" +
 				"3;3\n";
 
 		final FileInputSplit split = createTempFile(fileContent);
 
-		final String fileContent2 = "header|1:2:ot|"+
-				"3:2:tt|"+
+		final String fileContent2 = "header|1:2:ot|" +
+				"3:2:tt|" +
 				"3:1:to|";
 
 		final FileInputSplit split2 = createTempFile(fileContent2);
 
-		Graph<Long, Long, String> graph= Graph.fromCsvReader(split.getPath().toString(), split2.getPath().toString(), env).
+		Graph<Long, Long, String> graph = Graph.fromCsvReader(split.getPath().toString(), split2.getPath().toString(), env).
 				ignoreFirstLineEdges().ignoreFirstLineVertices().
 				fieldDelimiterEdges(":").fieldDelimiterVertices(";").
 				lineDelimiterEdges("|").

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphCreationWithMapperITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphCreationWithMapperITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphCreationWithMapperITCase.java
index 148952c..b67557b 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphCreationWithMapperITCase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphCreationWithMapperITCase.java
@@ -27,12 +27,16 @@ import org.apache.flink.graph.Vertex;
 import org.apache.flink.graph.test.TestGraphUtils;
 import org.apache.flink.graph.test.TestGraphUtils.DummyCustomType;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
 import java.util.List;
 
+/**
+ * Test graph creation with a mapper.
+ */
 @RunWith(Parameterized.class)
 public class GraphCreationWithMapperITCase extends MultipleProgramsTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphMutationsITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphMutationsITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphMutationsITCase.java
index c72750e..75f8d9c 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphMutationsITCase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphMutationsITCase.java
@@ -25,6 +25,7 @@ import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.graph.test.TestGraphUtils;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -32,6 +33,9 @@ import org.junit.runners.Parameterized;
 import java.util.ArrayList;
 import java.util.List;
 
+/**
+ * Tests for adding and removing {@link Graph} vertices and edges.
+ */
 @RunWith(Parameterized.class)
 public class GraphMutationsITCase extends MultipleProgramsTestBase {
 
@@ -45,7 +49,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 	public void testAddVertex() throws Exception {
 		/*
 		 * Test addVertex() -- simple case
-		 */	
+		 */
 
 		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
 
@@ -88,7 +92,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 		graph = graph.addVertices(vertices);
 
 		DataSet<Vertex<Long, Long>> data = graph.getVertices();
-		List<Vertex<Long, Long>> result= data.collect();
+		List<Vertex<Long, Long>> result = data.collect();
 
 		expectedResult = "1,1\n" +
 				"2,2\n" +
@@ -114,7 +118,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 		graph = graph.addVertex(new Vertex<>(1L, 1L));
 
 		DataSet<Vertex<Long, Long>> data = graph.getVertices();
-		List<Vertex<Long, Long>> result= data.collect();
+		List<Vertex<Long, Long>> result = data.collect();
 
 		expectedResult = "1,1\n" +
 				"2,2\n" +
@@ -143,7 +147,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 		graph = graph.addVertices(vertices);
 
 		DataSet<Vertex<Long, Long>> data = graph.getVertices();
-		List<Vertex<Long, Long>> result= data.collect();
+		List<Vertex<Long, Long>> result = data.collect();
 
 		expectedResult = "1,1\n" +
 				"2,2\n" +
@@ -172,7 +176,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 		graph = graph.addVertices(vertices);
 
 		DataSet<Vertex<Long, Long>> data = graph.getVertices();
-		List<Vertex<Long, Long>> result= data.collect();
+		List<Vertex<Long, Long>> result = data.collect();
 
 		expectedResult = "1,1\n" +
 				"2,2\n" +
@@ -197,7 +201,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 		graph = graph.removeVertex(new Vertex<>(5L, 5L));
 
 		DataSet<Edge<Long, Long>> data = graph.getEdges();
-		List<Edge<Long, Long>> result= data.collect();
+		List<Edge<Long, Long>> result = data.collect();
 
 		expectedResult = "1,2,12\n" +
 				"1,3,13\n" +
@@ -225,7 +229,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 		graph = graph.removeVertices(verticesToBeRemoved);
 
 		DataSet<Edge<Long, Long>> data = graph.getEdges();
-		List<Edge<Long, Long>> result= data.collect();
+		List<Edge<Long, Long>> result = data.collect();
 
 		expectedResult = "3,4,34\n" +
 				"3,5,35\n" +
@@ -247,7 +251,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 		graph = graph.removeVertex(new Vertex<>(6L, 6L));
 
 		DataSet<Edge<Long, Long>> data = graph.getEdges();
-		List<Edge<Long, Long>> result= data.collect();
+		List<Edge<Long, Long>> result = data.collect();
 
 		expectedResult = "1,2,12\n" +
 				"1,3,13\n" +
@@ -277,7 +281,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 		graph = graph.removeVertices(verticesToBeRemoved);
 
 		DataSet<Edge<Long, Long>> data = graph.getEdges();
-		List<Edge<Long, Long>> result= data.collect();
+		List<Edge<Long, Long>> result = data.collect();
 
 		expectedResult = "2,3,23\n" +
 				"3,4,34\n" +
@@ -304,7 +308,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 		graph = graph.removeVertices(verticesToBeRemoved);
 
 		DataSet<Edge<Long, Long>> data = graph.getEdges();
-		List<Edge<Long, Long>> result= data.collect();
+		List<Edge<Long, Long>> result = data.collect();
 
 		expectedResult = "1,2,12\n" +
 				"1,3,13\n" +
@@ -334,7 +338,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 		graph = graph.removeVertices(verticesToBeRemoved);
 
 		DataSet<Vertex<Long, Long>> data = graph.getVertices();
-		List<Vertex<Long, Long>> result= data.collect();
+		List<Vertex<Long, Long>> result = data.collect();
 
 		expectedResult = "1,1\n" +
 				"2,2\n" +
@@ -358,7 +362,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 		graph = graph.addEdge(new Vertex<>(6L, 6L), new Vertex<>(1L, 1L), 61L);
 
 		DataSet<Edge<Long, Long>> data = graph.getEdges();
-		List<Edge<Long, Long>> result= data.collect();
+		List<Edge<Long, Long>> result = data.collect();
 
 		expectedResult = "1,2,12\n" +
 				"1,3,13\n" +
@@ -390,7 +394,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 		graph = graph.addEdges(edgesToBeAdded);
 
 		DataSet<Edge<Long, Long>> data = graph.getEdges();
-		List<Edge<Long, Long>> result= data.collect();
+		List<Edge<Long, Long>> result = data.collect();
 
 		expectedResult = "1,2,12\n" +
 				"1,3,13\n" +
@@ -423,7 +427,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 		graph = graph.addEdges(edgesToBeAdded);
 
 		DataSet<Edge<Long, Long>> data = graph.getEdges();
-		List<Edge<Long, Long>> result= data.collect();
+		List<Edge<Long, Long>> result = data.collect();
 
 		expectedResult = "1,2,12\n" +
 				"1,3,13\n" +
@@ -450,7 +454,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 				12L);
 
 		DataSet<Edge<Long, Long>> data = graph.getEdges();
-		List<Edge<Long, Long>> result= data.collect();
+		List<Edge<Long, Long>> result = data.collect();
 
 		expectedResult = "1,2,12\n" +
 				"1,2,12\n" +
@@ -481,7 +485,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 		graph = graph.removeEdge(new Edge<>(5L, 1L, 51L));
 
 		DataSet<Edge<Long, Long>> data = graph.getEdges();
-		List<Edge<Long, Long>> result= data.collect();
+		List<Edge<Long, Long>> result = data.collect();
 
 		expectedResult = "1,2,12\n" +
 				"1,2,12\n" +
@@ -514,7 +518,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 		graph = graph.removeEdges(edgesToBeRemoved);
 
 		DataSet<Edge<Long, Long>> data = graph.getEdges();
-		List<Edge<Long, Long>> result= data.collect();
+		List<Edge<Long, Long>> result = data.collect();
 
 		expectedResult = "1,2,12\n" +
 				"1,2,12\n" +
@@ -543,7 +547,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 		graph = graph.removeEdges(edgesToBeRemoved);
 
 		DataSet<Edge<Long, Long>> data = graph.getEdges();
-		List<Edge<Long, Long>> result= data.collect();
+		List<Edge<Long, Long>> result = data.collect();
 
 		expectedResult = "1,2,12\n" +
 				"1,3,13\n" +
@@ -568,7 +572,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 		graph = graph.removeEdge(new Edge<>(6L, 1L, 61L));
 
 		DataSet<Edge<Long, Long>> data = graph.getEdges();
-		List<Edge<Long, Long>> result= data.collect();
+		List<Edge<Long, Long>> result = data.collect();
 
 		expectedResult = "1,2,12\n" +
 				"1,3,13\n" +
@@ -598,7 +602,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 		graph = graph.removeEdges(edgesToBeRemoved);
 
 		DataSet<Edge<Long, Long>> data = graph.getEdges();
-		List<Edge<Long, Long>> result= data.collect();
+		List<Edge<Long, Long>> result = data.collect();
 
 		expectedResult = "1,2,12\n" +
 				"1,3,13\n" +
@@ -607,7 +611,7 @@ public class GraphMutationsITCase extends MultipleProgramsTestBase {
 				"3,5,35\n" +
 				"4,5,45\n" +
 				"5,1,51\n";
-		
+
 		compareResultAsTuples(result, expectedResult);
 	}
 }

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphOperationsITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphOperationsITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphOperationsITCase.java
index e03e8cf..cbb4194 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphOperationsITCase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/GraphOperationsITCase.java
@@ -30,6 +30,7 @@ import org.apache.flink.graph.Vertex;
 import org.apache.flink.graph.test.TestGraphUtils;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -37,6 +38,9 @@ import org.junit.runners.Parameterized;
 import java.util.ArrayList;
 import java.util.List;
 
+/**
+ * Tests for {@link Graph} operations.
+ */
 @RunWith(Parameterized.class)
 public class GraphOperationsITCase extends MultipleProgramsTestBase {
 
@@ -457,4 +461,4 @@ public class GraphOperationsITCase extends MultipleProgramsTestBase {
 
 		compareResultAsTuples(result, expectedResult);
 	}
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/JoinWithEdgesITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/JoinWithEdgesITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/JoinWithEdgesITCase.java
index 6988218..43ff124 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/JoinWithEdgesITCase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/JoinWithEdgesITCase.java
@@ -30,12 +30,17 @@ import org.apache.flink.graph.test.TestGraphUtils;
 import org.apache.flink.graph.test.TestGraphUtils.DummyCustomParameterizedType;
 import org.apache.flink.graph.utils.EdgeToTuple3Map;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
 import java.util.List;
 
+/**
+ * Tests for {@link Graph#joinWithEdges}, {@link Graph#joinWithEdgesOnSource},
+ * and {@link Graph#joinWithEdgesOnTarget}.
+ */
 @RunWith(Parameterized.class)
 public class JoinWithEdgesITCase extends MultipleProgramsTestBase {
 
@@ -532,4 +537,4 @@ public class JoinWithEdgesITCase extends MultipleProgramsTestBase {
 			return new Tuple2<>(edge.getTarget(), true);
 		}
 	}
-}
\ No newline at end of file
+}

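For reference, Graph#joinWithEdges joins the edge set against a Tuple3 DataSet keyed on (source, target) and combines the two values with an EdgeJoinFunction; joinWithEdgesOnSource and joinWithEdgesOnTarget key on a single endpoint instead. A sketch under that assumed signature, reusing the EdgeToTuple3Map imported above so each edge value is joined with itself:

    Graph<Long, Long, Long> doubled = graph.joinWithEdges(
            graph.getEdges().map(new EdgeToTuple3Map<Long, Long>()),
            new EdgeJoinFunction<Long, Long>() {
                @Override
                public Long edgeJoin(Long edgeValue, Long inputValue) {
                    return edgeValue + inputValue; // edge value + its own Tuple3 copy
                }
            });
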
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/JoinWithVerticesITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/JoinWithVerticesITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/JoinWithVerticesITCase.java
index 7676e8c..181c1a7 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/JoinWithVerticesITCase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/JoinWithVerticesITCase.java
@@ -29,12 +29,16 @@ import org.apache.flink.graph.test.TestGraphUtils;
 import org.apache.flink.graph.test.TestGraphUtils.DummyCustomParameterizedType;
 import org.apache.flink.graph.utils.VertexToTuple2Map;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
 import java.util.List;
 
+/**
+ * Tests for {@link Graph#joinWithVertices}.
+ */
 @RunWith(Parameterized.class)
 public class JoinWithVerticesITCase extends MultipleProgramsTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/MapEdgesITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/MapEdgesITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/MapEdgesITCase.java
index 34a2518..f70b4fb 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/MapEdgesITCase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/MapEdgesITCase.java
@@ -28,12 +28,16 @@ import org.apache.flink.graph.test.TestGraphUtils;
 import org.apache.flink.graph.test.TestGraphUtils.DummyCustomParameterizedType;
 import org.apache.flink.graph.test.TestGraphUtils.DummyCustomType;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
 import java.util.List;
 
+/**
+ * Tests for {@link Graph#mapEdges}.
+ */
 @RunWith(Parameterized.class)
 public class MapEdgesITCase extends MultipleProgramsTestBase {
 
@@ -207,4 +211,4 @@ public class MapEdgesITCase extends MultipleProgramsTestBase {
 			return dummyValue;
 		}
 	}
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/MapVerticesITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/MapVerticesITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/MapVerticesITCase.java
index 24aae7b..9d1609f 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/MapVerticesITCase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/MapVerticesITCase.java
@@ -28,12 +28,16 @@ import org.apache.flink.graph.test.TestGraphUtils;
 import org.apache.flink.graph.test.TestGraphUtils.DummyCustomParameterizedType;
 import org.apache.flink.graph.test.TestGraphUtils.DummyCustomType;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
 import java.util.List;
 
+/**
+ * Tests for {@link Graph#mapVertices}.
+ */
 @RunWith(Parameterized.class)
 public class MapVerticesITCase extends MultipleProgramsTestBase {
 

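Graph#mapVertices (like the Graph#mapEdges tested above) returns a new Graph whose vertex values are produced by a plain MapFunction over each Vertex; a minimal sketch, assuming a Graph<Long, Long, Long> named graph:

    Graph<Long, String, Long> labeled = graph.mapVertices(
            new MapFunction<Vertex<Long, Long>, String>() {
                @Override
                public String map(Vertex<Long, Long> vertex) {
                    return "vertex-" + vertex.getId(); // new vertex value type
                }
            });
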
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnEdgesMethodsITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnEdgesMethodsITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnEdgesMethodsITCase.java
index b26bb43..dbd9f64 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnEdgesMethodsITCase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnEdgesMethodsITCase.java
@@ -31,6 +31,7 @@ import org.apache.flink.graph.Vertex;
 import org.apache.flink.graph.test.TestGraphUtils;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
 import org.apache.flink.util.Collector;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -38,6 +39,9 @@ import org.junit.runners.Parameterized;
 import java.util.List;
 import java.util.Objects;
 
+/**
+ * Tests for {@link Graph#groupReduceOnEdges} and {@link Graph#reduceOnEdges}.
+ */
 @RunWith(Parameterized.class)
 public class ReduceOnEdgesMethodsITCase extends MultipleProgramsTestBase {
 
@@ -403,7 +407,7 @@ public class ReduceOnEdgesMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateEdges(Vertex<Long, Long> v,
-		                         Iterable<Edge<Long, Long>> edges, Collector<Tuple2<Long, Long>> out) throws Exception {
+				Iterable<Edge<Long, Long>> edges, Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			long weight = Long.MAX_VALUE;
 			long minNeighborId = 0;
@@ -423,7 +427,7 @@ public class ReduceOnEdgesMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateEdges(Vertex<Long, Long> v,
-		                         Iterable<Edge<Long, Long>> edges, Collector<Tuple2<Long, Long>> out) throws Exception {
+				Iterable<Edge<Long, Long>> edges, Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			long weight = Long.MIN_VALUE;
 
@@ -459,7 +463,7 @@ public class ReduceOnEdgesMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateEdges(Vertex<Long, Long> v,
-		                         Iterable<Edge<Long, Long>> edges, Collector<Tuple2<Long, Long>> out) throws Exception {
+				Iterable<Edge<Long, Long>> edges, Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			long weight = Long.MAX_VALUE;
 			long minNeighborId = 0;
@@ -479,7 +483,7 @@ public class ReduceOnEdgesMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateEdges(Iterable<Tuple2<Long, Edge<Long, Long>>> edges,
-		                         Collector<Tuple2<Long, Long>> out) throws Exception {
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			for (Tuple2<Long, Edge<Long, Long>> edge : edges) {
 				out.collect(new Tuple2<>(edge.f0, edge.f1.getTarget()));
@@ -492,7 +496,7 @@ public class ReduceOnEdgesMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateEdges(Iterable<Tuple2<Long, Edge<Long, Long>>> edges,
-		                         Collector<Tuple2<Long, Long>> out) throws Exception {
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			for (Tuple2<Long, Edge<Long, Long>> edge : edges) {
 				if (edge.f0 != 5) {
@@ -508,7 +512,7 @@ public class ReduceOnEdgesMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateEdges(Vertex<Long, Long> v, Iterable<Edge<Long, Long>> edges,
-		                         Collector<Tuple2<Long, Long>> out) throws Exception {
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			for (Edge<Long, Long> edge : edges) {
 				if (v.getValue() > 2) {
@@ -523,7 +527,7 @@ public class ReduceOnEdgesMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateEdges(Iterable<Tuple2<Long, Edge<Long, Long>>> edges,
-		                         Collector<Tuple2<Long, Long>> out) throws Exception {
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			for (Tuple2<Long, Edge<Long, Long>> edge : edges) {
 				out.collect(new Tuple2<>(edge.f0, edge.f1.getSource()));
@@ -536,7 +540,7 @@ public class ReduceOnEdgesMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateEdges(Iterable<Tuple2<Long, Edge<Long, Long>>> edges,
-		                         Collector<Tuple2<Long, Long>> out) throws Exception {
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			for (Tuple2<Long, Edge<Long, Long>> edge : edges) {
 				if (edge.f0 != 5) {
@@ -552,7 +556,7 @@ public class ReduceOnEdgesMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateEdges(Vertex<Long, Long> v, Iterable<Edge<Long, Long>> edges,
-		                         Collector<Tuple2<Long, Long>> out) throws Exception {
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			for (Edge<Long, Long> edge : edges) {
 				if (v.getValue() > 2) {
@@ -567,7 +571,7 @@ public class ReduceOnEdgesMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateEdges(Iterable<Tuple2<Long, Edge<Long, Long>>> edges,
-		                         Collector<Tuple2<Long, Long>> out) throws Exception {
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 			for (Tuple2<Long, Edge<Long, Long>> edge : edges) {
 				if (Objects.equals(edge.f0, edge.f1.getTarget())) {
 					out.collect(new Tuple2<>(edge.f0, edge.f1.getSource()));
@@ -583,7 +587,7 @@ public class ReduceOnEdgesMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateEdges(Iterable<Tuple2<Long, Edge<Long, Long>>> edges,
-		                         Collector<Tuple2<Long, Long>> out) throws Exception {
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			for (Tuple2<Long, Edge<Long, Long>> edge : edges) {
 				if (edge.f0 != 5 && edge.f0 != 2) {
@@ -603,7 +607,7 @@ public class ReduceOnEdgesMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateEdges(Vertex<Long, Long> v, Iterable<Edge<Long, Long>> edges,
-		                         Collector<Tuple2<Long, Long>> out) throws Exception {
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			for (Edge<Long, Long> edge : edges) {
 				if (v.getValue() > 4) {
@@ -616,4 +620,4 @@ public class ReduceOnEdgesMethodsITCase extends MultipleProgramsTestBase {
 			}
 		}
 	}
-}
\ No newline at end of file
+}

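The re-indented iterateEdges signatures above belong to EdgesFunction / EdgesFunctionWithVertexValue implementations, which Graph#groupReduceOnEdges applies per vertex over its direction-filtered edges. A sketch emitting one (vertexId, targetId) pair per out-edge, using the interface exactly as it appears in the diff:

    DataSet<Tuple2<Long, Long>> outTargets = graph.groupReduceOnEdges(
            new EdgesFunctionWithVertexValue<Long, Long, Long, Tuple2<Long, Long>>() {
                @Override
                public void iterateEdges(Vertex<Long, Long> v,
                        Iterable<Edge<Long, Long>> edges, Collector<Tuple2<Long, Long>> out) {
                    for (Edge<Long, Long> edge : edges) {
                        out.collect(new Tuple2<>(v.getId(), edge.getTarget()));
                    }
                }
            }, EdgeDirection.OUT);
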
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnEdgesWithExceptionITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnEdgesWithExceptionITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnEdgesWithExceptionITCase.java
index 7a0a30c..19e701d 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnEdgesWithExceptionITCase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnEdgesWithExceptionITCase.java
@@ -34,19 +34,22 @@ import org.apache.flink.runtime.minicluster.LocalFlinkMiniCluster;
 import org.apache.flink.test.util.TestEnvironment;
 import org.apache.flink.util.Collector;
 import org.apache.flink.util.TestLogger;
+
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
 import static org.junit.Assert.fail;
 
+/**
+ * Test expected exceptions for {@link Graph#groupReduceOnEdges}.
+ */
 public class ReduceOnEdgesWithExceptionITCase extends TestLogger {
 
 	private static final int PARALLELISM = 4;
 
 	private static LocalFlinkMiniCluster cluster;
 
-
 	@BeforeClass
 	public static void setupCluster() {
 		Configuration config = new Configuration();
@@ -65,7 +68,7 @@ public class ReduceOnEdgesWithExceptionITCase extends TestLogger {
 	}
 
 	/**
-	 * Test groupReduceOnEdges() with an edge having a srcId that does not exist in the vertex DataSet
+	 * Test groupReduceOnEdges() with an edge having a srcId that does not exist in the vertex DataSet.
 	 */
 	@Test
 	public void testGroupReduceOnEdgesInvalidEdgeSrcId() throws Exception {
@@ -91,7 +94,7 @@ public class ReduceOnEdgesWithExceptionITCase extends TestLogger {
 	}
 
 	/**
-	 * Test groupReduceOnEdges() with an edge having a trgId that does not exist in the vertex DataSet
+	 * Test groupReduceOnEdges() with an edge having a trgId that does not exist in the vertex DataSet.
 	 */
 	@Test
 	public void testGroupReduceOnEdgesInvalidEdgeTrgId() throws Exception {
@@ -116,17 +119,16 @@ public class ReduceOnEdgesWithExceptionITCase extends TestLogger {
 		}
 	}
 
-
 	@SuppressWarnings("serial")
 	private static final class SelectNeighborsValueGreaterThanFour implements
 			EdgesFunctionWithVertexValue<Long, Long, Long, Tuple2<Long, Long>> {
 
 		@Override
 		public void iterateEdges(Vertex<Long, Long> v, Iterable<Edge<Long, Long>> edges,
-								 Collector<Tuple2<Long, Long>> out) throws Exception {
-			for(Edge<Long, Long> edge : edges) {
-				if(v.getValue() > 4) {
-					if(v.getId().equals(edge.getTarget())) {
+				Collector<Tuple2<Long, Long>> out) throws Exception {
+			for (Edge<Long, Long> edge : edges) {
+				if (v.getValue() > 4) {
+					if (v.getId().equals(edge.getTarget())) {
 						out.collect(new Tuple2<>(v.getId(), edge.getSource()));
 					} else {
 						out.collect(new Tuple2<>(v.getId(), edge.getTarget()));

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnNeighborMethodsITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnNeighborMethodsITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnNeighborMethodsITCase.java
index 7fad2e8..ee48b84 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnNeighborMethodsITCase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnNeighborMethodsITCase.java
@@ -32,12 +32,16 @@ import org.apache.flink.graph.Vertex;
 import org.apache.flink.graph.test.TestGraphUtils;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
 import org.apache.flink.util.Collector;
+
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
 import java.util.List;
 
+/**
+ * Tests for {@link Graph#groupReduceOnNeighbors} and {@link Graph#reduceOnNeighbors}.
+ */
 @RunWith(Parameterized.class)
 public class ReduceOnNeighborMethodsITCase extends MultipleProgramsTestBase {
 
@@ -410,8 +414,8 @@ public class ReduceOnNeighborMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateNeighbors(Vertex<Long, Long> vertex,
-		                             Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
-		                             Collector<Tuple2<Long, Long>> out) throws Exception {
+				Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			long sum = 0;
 			for (Tuple2<Edge<Long, Long>, Vertex<Long, Long>> neighbor : neighbors) {
@@ -427,8 +431,8 @@ public class ReduceOnNeighborMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateNeighbors(Vertex<Long, Long> vertex,
-		                             Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
-		                             Collector<Tuple2<Long, Long>> out) throws Exception {
+				Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			long sum = 0;
 			for (Tuple2<Edge<Long, Long>, Vertex<Long, Long>> neighbor : neighbors) {
@@ -444,8 +448,8 @@ public class ReduceOnNeighborMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateNeighbors(Vertex<Long, Long> vertex,
-		                             Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
-		                             Collector<Tuple2<Long, Long>> out) throws Exception {
+				Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			long sum = 0;
 			for (Tuple2<Edge<Long, Long>, Vertex<Long, Long>> neighbor : neighbors) {
@@ -461,8 +465,8 @@ public class ReduceOnNeighborMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateNeighbors(Vertex<Long, Long> vertex,
-		                             Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
-		                             Collector<Tuple2<Long, Long>> out) throws Exception {
+				Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			long sum = 0;
 			for (Tuple2<Edge<Long, Long>, Vertex<Long, Long>> neighbor : neighbors) {
@@ -480,8 +484,8 @@ public class ReduceOnNeighborMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateNeighbors(Vertex<Long, Long> vertex,
-		                             Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
-		                             Collector<Tuple2<Long, Long>> out) throws Exception {
+				Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			long sum = 0;
 			for (Tuple2<Edge<Long, Long>, Vertex<Long, Long>> neighbor : neighbors) {
@@ -499,8 +503,8 @@ public class ReduceOnNeighborMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateNeighbors(Vertex<Long, Long> vertex,
-		                             Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
-		                             Collector<Tuple2<Long, Long>> out) throws Exception {
+				Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			long sum = 0;
 			for (Tuple2<Edge<Long, Long>, Vertex<Long, Long>> neighbor : neighbors) {
@@ -526,7 +530,7 @@ public class ReduceOnNeighborMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateNeighbors(Iterable<Tuple3<Long, Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
-		                             Collector<Tuple2<Long, Long>> out) throws Exception {
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			long sum = 0;
 			Tuple3<Long, Edge<Long, Long>, Vertex<Long, Long>> next = null;
@@ -544,7 +548,7 @@ public class ReduceOnNeighborMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateNeighbors(Iterable<Tuple3<Long, Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
-		                             Collector<Tuple2<Long, Long>> out) throws Exception {
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			long sum = 0;
 			Tuple3<Long, Edge<Long, Long>, Vertex<Long, Long>> next = null;
@@ -565,7 +569,7 @@ public class ReduceOnNeighborMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateNeighbors(Iterable<Tuple3<Long, Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
-		                             Collector<Tuple2<Long, Long>> out) throws Exception {
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			long sum = 0;
 			Tuple3<Long, Edge<Long, Long>, Vertex<Long, Long>> next = null;
@@ -586,7 +590,7 @@ public class ReduceOnNeighborMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateNeighbors(Iterable<Tuple3<Long, Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
-		                             Collector<Tuple2<Long, Long>> out) throws Exception {
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			long sum = 0;
 			Tuple3<Long, Edge<Long, Long>, Vertex<Long, Long>> next = null;
@@ -607,8 +611,8 @@ public class ReduceOnNeighborMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateNeighbors(Vertex<Long, Long> vertex,
-		                             Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
-		                             Collector<Tuple2<Long, Long>> out) throws Exception {
+				Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			long sum = 0;
 			for (Tuple2<Edge<Long, Long>, Vertex<Long, Long>> neighbor : neighbors) {
@@ -625,8 +629,8 @@ public class ReduceOnNeighborMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateNeighbors(Vertex<Long, Long> vertex,
-		                             Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
-		                             Collector<Tuple2<Long, Long>> out) throws Exception {
+				Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			long sum = 0;
 			for (Tuple2<Edge<Long, Long>, Vertex<Long, Long>> neighbor : neighbors) {
@@ -643,8 +647,8 @@ public class ReduceOnNeighborMethodsITCase extends MultipleProgramsTestBase {
 
 		@Override
 		public void iterateNeighbors(Vertex<Long, Long> vertex,
-		                             Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
-		                             Collector<Tuple2<Long, Long>> out) throws Exception {
+				Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			long sum = 0;
 			for (Tuple2<Edge<Long, Long>, Vertex<Long, Long>> neighbor : neighbors) {
@@ -654,4 +658,4 @@ public class ReduceOnNeighborMethodsITCase extends MultipleProgramsTestBase {
 			out.collect(new Tuple2<>(vertex.getId(), sum + vertex.getValue() + 5));
 		}
 	}
-}
\ No newline at end of file
+}

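Likewise, the neighbor variants hand iterateNeighbors the vertex plus an Iterable of (edge, neighbor vertex) pairs; the tests above mostly sum the neighbor values. A sketch of that pattern with the signature shown in the diff:

    DataSet<Tuple2<Long, Long>> neighborSums = graph.groupReduceOnNeighbors(
            new NeighborsFunctionWithVertexValue<Long, Long, Long, Tuple2<Long, Long>>() {
                @Override
                public void iterateNeighbors(Vertex<Long, Long> vertex,
                        Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
                        Collector<Tuple2<Long, Long>> out) {
                    long sum = 0;
                    for (Tuple2<Edge<Long, Long>, Vertex<Long, Long>> neighbor : neighbors) {
                        sum += neighbor.f1.getValue(); // f1 is the neighboring vertex
                    }
                    out.collect(new Tuple2<>(vertex.getId(), sum));
                }
            }, EdgeDirection.IN);
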
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnNeighborsWithExceptionITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnNeighborsWithExceptionITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnNeighborsWithExceptionITCase.java
index b337bca..d3b97a1 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnNeighborsWithExceptionITCase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/ReduceOnNeighborsWithExceptionITCase.java
@@ -35,19 +35,22 @@ import org.apache.flink.runtime.minicluster.LocalFlinkMiniCluster;
 import org.apache.flink.test.util.TestEnvironment;
 import org.apache.flink.util.Collector;
 import org.apache.flink.util.TestLogger;
+
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
 import static org.junit.Assert.fail;
 
+/**
+ * Test expected exceptions for {@link Graph#groupReduceOnNeighbors} and {@link Graph#reduceOnNeighbors}.
+ */
 public class ReduceOnNeighborsWithExceptionITCase extends TestLogger {
 
 	private static final int PARALLELISM = 4;
 
 	private static LocalFlinkMiniCluster cluster;
 
-
 	@BeforeClass
 	public static void setupCluster() {
 		Configuration config = new Configuration();
@@ -67,7 +70,7 @@ public class ReduceOnNeighborsWithExceptionITCase extends TestLogger {
 
 	/**
 	 * Test groupReduceOnNeighbors() -NeighborsFunctionWithVertexValue-
-	 * with an edge having a srcId that does not exist in the vertex DataSet
+	 * with an edge having a srcId that does not exist in the vertex DataSet.
 	 */
 	@Test
 	public void testGroupReduceOnNeighborsWithVVInvalidEdgeSrcId() throws Exception {
@@ -94,7 +97,7 @@ public class ReduceOnNeighborsWithExceptionITCase extends TestLogger {
 
 	/**
 	 * Test groupReduceOnNeighbors() -NeighborsFunctionWithVertexValue-
-	 * with an edge having a trgId that does not exist in the vertex DataSet
+	 * with an edge having a trgId that does not exist in the vertex DataSet.
 	 */
 	@Test
 	public void testGroupReduceOnNeighborsWithVVInvalidEdgeTrgId() throws Exception {
@@ -121,7 +124,7 @@ public class ReduceOnNeighborsWithExceptionITCase extends TestLogger {
 
 	/**
 	 * Test groupReduceOnNeighbors() -NeighborsFunction-
-	 * with an edge having a srcId that does not exist in the vertex DataSet
+	 * with an edge having a srcId that does not exist in the vertex DataSet.
 	 */
 	@Test
 	public void testGroupReduceOnNeighborsInvalidEdgeSrcId() throws Exception {
@@ -146,7 +149,7 @@ public class ReduceOnNeighborsWithExceptionITCase extends TestLogger {
 
 	/**
 	 * Test groupReduceOnNeighbors() -NeighborsFunction-
-	 * with an edge having a trgId that does not exist in the vertex DataSet
+	 * with an edge having a trgId that does not exist in the vertex DataSet.
 	 */
 	@Test
 	public void testGroupReduceOnNeighborsInvalidEdgeTrgId() throws Exception {
@@ -175,8 +178,8 @@ public class ReduceOnNeighborsWithExceptionITCase extends TestLogger {
 
 		@Override
 		public void iterateNeighbors(Vertex<Long, Long> vertex,
-									 Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
-									 Collector<Tuple2<Long, Long>> out) throws Exception {
+				Iterable<Tuple2<Edge<Long, Long>, Vertex<Long, Long>>> neighbors,
+				Collector<Tuple2<Long, Long>> out) throws Exception {
 
 			long sum = 0;
 			for (Tuple2<Edge<Long, Long>, Vertex<Long, Long>> neighbor : neighbors) {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/TypeExtractorTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/TypeExtractorTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/TypeExtractorTest.java
index 484ef3d..4d9040c 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/TypeExtractorTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/operations/TypeExtractorTest.java
@@ -24,13 +24,21 @@ import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.ExecutionEnvironment;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.api.java.typeutils.TupleTypeInfo;
-import org.apache.flink.graph.*;
+import org.apache.flink.graph.Edge;
+import org.apache.flink.graph.EdgeDirection;
+import org.apache.flink.graph.EdgesFunction;
+import org.apache.flink.graph.Graph;
+import org.apache.flink.graph.Vertex;
 import org.apache.flink.graph.test.TestGraphUtils;
 import org.apache.flink.util.Collector;
+
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+/**
+ * Test output types from {@link Graph} methods.
+ */
 public class TypeExtractorTest {
 
 	private Graph<Long, Long, Long> inputGraph;
@@ -81,7 +89,7 @@ public class TypeExtractorTest {
 		Assert.assertTrue((new TupleTypeInfo<Tuple2<Long, Long>>(BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO)).equals(output.getType()));
 	}
 
-	public static final class VertexMapper<K> implements MapFunction<Vertex<K, Long>, Tuple2<K, Integer>> {
+	private static final class VertexMapper<K> implements MapFunction<Vertex<K, Long>, Tuple2<K, Integer>> {
 
 		private final Tuple2<K, Integer> outTuple = new Tuple2<>();
 
@@ -91,7 +99,7 @@ public class TypeExtractorTest {
 		}
 	}
 
-	public static final class EdgeMapper<K> implements MapFunction<Edge<K, Long>, Tuple2<K, Integer>> {
+	private static final class EdgeMapper<K> implements MapFunction<Edge<K, Long>, Tuple2<K, Integer>> {
 
 		private final Tuple2<K, Integer> outTuple = new Tuple2<>();
 
@@ -101,7 +109,7 @@ public class TypeExtractorTest {
 		}
 	}
 
-	public static final class EdgesGroupFunction<K, EV> implements EdgesFunction<K, EV, Tuple2<K, EV>> {
+	private static final class EdgesGroupFunction<K, EV> implements EdgesFunction<K, EV, Tuple2<K, EV>> {
 
 		@Override
 		public void iterateEdges(Iterable<Tuple2<K, Edge<K, EV>>> edges, Collector<Tuple2<K, EV>> out) throws Exception {
@@ -109,7 +117,7 @@ public class TypeExtractorTest {
 		}
 	}
 
-	public static final class VertexInitializer<K> implements MapFunction<K, Tuple2<K, Integer>> {
+	private static final class VertexInitializer<K> implements MapFunction<K, Tuple2<K, Integer>> {
 
 		@Override
 		public Tuple2<K, Integer> map(K value) throws Exception {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/IntValueArrayComparatorTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/IntValueArrayComparatorTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/IntValueArrayComparatorTest.java
index bbc6846..89cb3f9 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/IntValueArrayComparatorTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/IntValueArrayComparatorTest.java
@@ -23,6 +23,9 @@ import org.apache.flink.api.common.typeutils.TypeComparator;
 import org.apache.flink.api.common.typeutils.TypeSerializer;
 import org.apache.flink.types.IntValue;
 
+/**
+ * Tests for {@link IntValueArrayComparator}.
+ */
 public class IntValueArrayComparatorTest extends ComparatorTestBase<IntValueArray> {
 
 	@Override

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/IntValueArraySerializerTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/IntValueArraySerializerTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/IntValueArraySerializerTest.java
index 1ee24c9..88a367c 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/IntValueArraySerializerTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/IntValueArraySerializerTest.java
@@ -83,7 +83,7 @@ public class IntValueArraySerializerTest extends SerializerTestBase<IntValueArra
 
 		IntValueArray iva8 = new IntValueArray();
 		iva8.addAll(iva7);
-		for (int i = 0 ; i < 1.5 * defaultElements ; i++) {
+		for (int i = 0; i < 1.5 * defaultElements; i++) {
 			iva8.add(new IntValue(i));
 		}
 		iva8.addAll(iva8);

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/IntValueArrayTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/IntValueArrayTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/IntValueArrayTest.java
index 2e1282a..77d98a9 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/IntValueArrayTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/IntValueArrayTest.java
@@ -19,12 +19,16 @@
 package org.apache.flink.graph.types.valuearray;
 
 import org.apache.flink.types.IntValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
+/**
+ * Tests for {@link IntValueArray}.
+ */
 public class IntValueArrayTest {
 
 	@Test
@@ -34,7 +38,7 @@ public class IntValueArrayTest {
 		ValueArray<IntValue> iva = new IntValueArray(IntValueArray.DEFAULT_CAPACITY_IN_BYTES);
 
 		// fill the array
-		for (int i = 0 ; i < count ; i++) {
+		for (int i = 0; i < count; i++) {
 			assertFalse(iva.isFull());
 			assertEquals(i, iva.size());
 
@@ -61,9 +65,9 @@ public class IntValueArrayTest {
 		assertEquals(iva, iva.copy());
 
 		// test copyTo
-		IntValueArray iva_to = new IntValueArray();
-		iva.copyTo(iva_to);
-		assertEquals(iva, iva_to);
+		IntValueArray ivaTo = new IntValueArray();
+		iva.copyTo(ivaTo);
+		assertEquals(iva, ivaTo);
 
 		// test clear
 		iva.clear();
@@ -77,7 +81,7 @@ public class IntValueArrayTest {
 		ValueArray<IntValue> iva = new IntValueArray();
 
 		// add several elements
-		for (int i = 0 ; i < count ; i++) {
+		for (int i = 0; i < count; i++) {
 			assertFalse(iva.isFull());
 			assertEquals(i, iva.size());
 
@@ -104,9 +108,9 @@ public class IntValueArrayTest {
 		assertEquals(iva, iva.copy());
 
 		// test copyTo
-		IntValueArray iva_to = new IntValueArray();
-		iva.copyTo(iva_to);
-		assertEquals(iva, iva_to);
+		IntValueArray ivaTo = new IntValueArray();
+		iva.copyTo(ivaTo);
+		assertEquals(iva, ivaTo);
 
 		// test mark/reset
 		int size = iva.size();

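The iva_to -> ivaTo renames above sit in the copy tests of the ValueArray types; the contract they verify, restated as a sketch using only methods visible in the diff:

    IntValueArray source = new IntValueArray(IntValueArray.DEFAULT_CAPACITY_IN_BYTES);
    for (int i = 0; i < 10; i++) {
        source.add(new IntValue(i));     // add() returns false once a bounded array is full
    }

    IntValueArray target = new IntValueArray();
    source.copyTo(target);               // copy into a caller-supplied instance
    assertEquals(source, target);
    assertEquals(source, source.copy()); // copy() allocates the duplicate itself
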
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/LongValueArrayComparatorTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/LongValueArrayComparatorTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/LongValueArrayComparatorTest.java
index af9dbdb..e76b840 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/LongValueArrayComparatorTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/LongValueArrayComparatorTest.java
@@ -23,6 +23,9 @@ import org.apache.flink.api.common.typeutils.TypeComparator;
 import org.apache.flink.api.common.typeutils.TypeSerializer;
 import org.apache.flink.types.LongValue;
 
+/**
+ * Tests for {@link LongValueArrayComparator}.
+ */
 public class LongValueArrayComparatorTest extends ComparatorTestBase<LongValueArray> {
 
 	@Override

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/LongValueArraySerializerTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/LongValueArraySerializerTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/LongValueArraySerializerTest.java
index 1cd0a6c..50201da 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/LongValueArraySerializerTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/LongValueArraySerializerTest.java
@@ -83,7 +83,7 @@ public class LongValueArraySerializerTest extends SerializerTestBase<LongValueAr
 
 		LongValueArray lva8 = new LongValueArray();
 		lva8.addAll(lva7);
-		for (int i = 0 ; i < 1.5 * defaultElements ; i++) {
+		for (int i = 0; i < 1.5 * defaultElements; i++) {
 			lva8.add(new LongValue(i));
 		}
 		lva8.addAll(lva8);

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/LongValueArrayTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/LongValueArrayTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/LongValueArrayTest.java
index cfc345e..7acdc4c 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/LongValueArrayTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/LongValueArrayTest.java
@@ -19,12 +19,16 @@
 package org.apache.flink.graph.types.valuearray;
 
 import org.apache.flink.types.LongValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
+/**
+ * Tests for {@link LongValueArray}.
+ */
 public class LongValueArrayTest {
 
 	@Test
@@ -34,7 +38,7 @@ public class LongValueArrayTest {
 		ValueArray<LongValue> lva = new LongValueArray(LongValueArray.DEFAULT_CAPACITY_IN_BYTES);
 
 		// fill the array
-		for (int i = 0 ; i < count ; i++) {
+		for (int i = 0; i < count; i++) {
 			assertFalse(lva.isFull());
 			assertEquals(i, lva.size());
 
@@ -61,9 +65,9 @@ public class LongValueArrayTest {
 		assertEquals(lva, lva.copy());
 
 		// test copyTo
-		LongValueArray lva_to = new LongValueArray();
-		lva.copyTo(lva_to);
-		assertEquals(lva, lva_to);
+		LongValueArray lvaTo = new LongValueArray();
+		lva.copyTo(lvaTo);
+		assertEquals(lva, lvaTo);
 
 		// test clear
 		lva.clear();
@@ -77,7 +81,7 @@ public class LongValueArrayTest {
 		ValueArray<LongValue> lva = new LongValueArray();
 
 		// add several elements
-		for (int i = 0 ; i < count ; i++) {
+		for (int i = 0; i < count; i++) {
 			assertFalse(lva.isFull());
 			assertEquals(i, lva.size());
 
@@ -104,9 +108,9 @@ public class LongValueArrayTest {
 		assertEquals(lva, lva.copy());
 
 		// test copyTo
-		LongValueArray lva_to = new LongValueArray();
-		lva.copyTo(lva_to);
-		assertEquals(lva, lva_to);
+		LongValueArray lvaTo = new LongValueArray();
+		lva.copyTo(lvaTo);
+		assertEquals(lva, lvaTo);
 
 		// test mark/reset
 		int size = lva.size();

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/NullValueArrayComparatorTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/NullValueArrayComparatorTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/NullValueArrayComparatorTest.java
index 10fa2e2..18f963f 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/NullValueArrayComparatorTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/NullValueArrayComparatorTest.java
@@ -23,6 +23,9 @@ import org.apache.flink.api.common.typeutils.TypeComparator;
 import org.apache.flink.api.common.typeutils.TypeSerializer;
 import org.apache.flink.types.NullValue;
 
+/**
+ * Tests for {@link NullValueArrayComparator}.
+ */
 public class NullValueArrayComparatorTest extends ComparatorTestBase<NullValueArray> {
 
 	@Override

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/NullValueArraySerializerTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/NullValueArraySerializerTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/NullValueArraySerializerTest.java
index 2253a42..8cf1efa 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/NullValueArraySerializerTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/NullValueArraySerializerTest.java
@@ -58,7 +58,7 @@ public class NullValueArraySerializerTest extends SerializerTestBase<NullValueAr
 
 		NullValueArray nva3 = new NullValueArray();
 		nva3.addAll(nva2);
-		for (int i = 0 ; i < 100 ; i++) {
+		for (int i = 0; i < 100; i++) {
 			nva3.add(nv);
 		}
 		nva3.addAll(nva3);

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/NullValueArrayTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/NullValueArrayTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/NullValueArrayTest.java
index 6d013a1..988812e 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/NullValueArrayTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/NullValueArrayTest.java
@@ -19,12 +19,16 @@
 package org.apache.flink.graph.types.valuearray;
 
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
+/**
+ * Tests for {@link NullValueArray}.
+ */
 public class NullValueArrayTest {
 
 	@Test
@@ -34,7 +38,7 @@ public class NullValueArrayTest {
 		ValueArray<NullValue> nva = new NullValueArray();
 
 		// add several elements
-		for (int i = 0 ; i < count ; i++) {
+		for (int i = 0; i < count; i++) {
 			assertFalse(nva.isFull());
 			assertEquals(i, nva.size());
 
@@ -61,9 +65,9 @@ public class NullValueArrayTest {
 		assertEquals(nva, nva.copy());
 
 		// test copyTo
-		NullValueArray nva_to = new NullValueArray();
-		nva.copyTo(nva_to);
-		assertEquals(nva, nva_to);
+		NullValueArray nvaTo = new NullValueArray();
+		nva.copyTo(nvaTo);
+		assertEquals(nva, nvaTo);
 
 		// test mark/reset
 		int size = nva.size();

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/StringValueArrayComparatorTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/StringValueArrayComparatorTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/StringValueArrayComparatorTest.java
index e7cc102..618aa8b 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/StringValueArrayComparatorTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/StringValueArrayComparatorTest.java
@@ -23,6 +23,9 @@ import org.apache.flink.api.common.typeutils.TypeComparator;
 import org.apache.flink.api.common.typeutils.TypeSerializer;
 import org.apache.flink.types.StringValue;
 
+/**
+ * Tests for {@link StringValueArrayComparator}.
+ */
 public class StringValueArrayComparatorTest extends ComparatorTestBase<StringValueArray> {
 
 	@Override

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/StringValueArraySerializerTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/StringValueArraySerializerTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/StringValueArraySerializerTest.java
index f5909da..52892ca 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/StringValueArraySerializerTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/StringValueArraySerializerTest.java
@@ -83,7 +83,7 @@ public class StringValueArraySerializerTest extends SerializerTestBase<StringVal
 
 		StringValueArray sva8 = new StringValueArray();
 		sva8.addAll(sva7);
-		for (int i = 0 ; i < 1.5 * defaultElements ; i++) {
+		for (int i = 0; i < 1.5 * defaultElements; i++) {
 			sva8.add(new StringValue(String.valueOf(i)));
 		}
 		sva8.addAll(sva8);

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/StringValueArrayTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/StringValueArrayTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/StringValueArrayTest.java
index a425e8e..d10e984 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/StringValueArrayTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/StringValueArrayTest.java
@@ -19,12 +19,16 @@
 package org.apache.flink.graph.types.valuearray;
 
 import org.apache.flink.types.StringValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
+/**
+ * Tests for {@link StringValueArray}.
+ */
 public class StringValueArrayTest {
 
 	@Test
@@ -35,11 +39,11 @@ public class StringValueArrayTest {
 		ValueArray<StringValue> sva = new StringValueArray(StringValueArray.DEFAULT_CAPACITY_IN_BYTES);
 
 		// fill the array
-		for (int i = 0 ; i < count ; i++) {
+		for (int i = 0; i < count; i++) {
 			assertFalse(sva.isFull());
 			assertEquals(i, sva.size());
 
-			assertTrue(sva.add(new StringValue(Character.toString((char)(i & 0x7F)))));
+			assertTrue(sva.add(new StringValue(Character.toString((char) (i & 0x7F)))));
 
 			assertEquals(i + 1, sva.size());
 		}
@@ -55,16 +59,16 @@ public class StringValueArrayTest {
 		}
 
 		// add element past end of array
-		assertFalse(sva.add(new StringValue(String.valueOf((char)count))));
+		assertFalse(sva.add(new StringValue(String.valueOf((char) count))));
 		assertFalse(sva.addAll(sva));
 
 		// test copy
 		assertEquals(sva, sva.copy());
 
 		// test copyTo
-		StringValueArray sva_to = new StringValueArray();
-		sva.copyTo(sva_to);
-		assertEquals(sva, sva_to);
+		StringValueArray svaTo = new StringValueArray();
+		sva.copyTo(svaTo);
+		assertEquals(sva, svaTo);
 
 		// test clear
 		sva.clear();
@@ -79,11 +83,11 @@ public class StringValueArrayTest {
 		ValueArray<StringValue> sva = new StringValueArray(3200);
 
 		// fill the array
-		for (int i = 0 ; i < count ; i++) {
+		for (int i = 0; i < count; i++) {
 			assertFalse(sva.isFull());
 			assertEquals(i, sva.size());
 
-			assertTrue(sva.add(new StringValue(Character.toString((char)(i & 0xFF)))));
+			assertTrue(sva.add(new StringValue(Character.toString((char) (i & 0xFF)))));
 
 			assertEquals(i + 1, sva.size());
 		}
@@ -99,16 +103,16 @@ public class StringValueArrayTest {
 		}
 
 		// add element past end of array
-		assertFalse(sva.add(new StringValue(String.valueOf((char)count))));
+		assertFalse(sva.add(new StringValue(String.valueOf((char) count))));
 		assertFalse(sva.addAll(sva));
 
 		// test copy
 		assertEquals(sva, sva.copy());
 
 		// test copyTo
-		StringValueArray sva_to = new StringValueArray();
-		sva.copyTo(sva_to);
-		assertEquals(sva, sva_to);
+		StringValueArray svaTo = new StringValueArray();
+		sva.copyTo(svaTo);
+		assertEquals(sva, svaTo);
 
 		// test clear
 		sva.clear();
@@ -122,11 +126,11 @@ public class StringValueArrayTest {
 		ValueArray<StringValue> sva = new StringValueArray();
 
 		// add several elements
-		for (int i = 0 ; i < count ; i++) {
+		for (int i = 0; i < count; i++) {
 			assertFalse(sva.isFull());
 			assertEquals(i, sva.size());
 
-			assertTrue(sva.add(new StringValue(String.valueOf((char)i))));
+			assertTrue(sva.add(new StringValue(String.valueOf((char) i))));
 
 			assertEquals(i + 1, sva.size());
 		}
@@ -142,16 +146,16 @@ public class StringValueArrayTest {
 		}
 
 		// add element past end of array
-		assertTrue(sva.add(new StringValue(String.valueOf((char)count))));
+		assertTrue(sva.add(new StringValue(String.valueOf((char) count))));
 		assertTrue(sva.addAll(sva));
 
 		// test copy
 		assertEquals(sva, sva.copy());
 
 		// test copyTo
-		StringValueArray sva_to = new StringValueArray();
-		sva.copyTo(sva_to);
-		assertEquals(sva, sva_to);
+		StringValueArray svaTo = new StringValueArray();
+		sva.copyTo(svaTo);
+		assertEquals(sva, svaTo);
 
 		// test mark/reset
 		int size = sva.size();

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/ValueArrayTypeInfoTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/ValueArrayTypeInfoTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/ValueArrayTypeInfoTest.java
index 73cecc0..54e9349 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/ValueArrayTypeInfoTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/types/valuearray/ValueArrayTypeInfoTest.java
@@ -21,7 +21,7 @@
 package org.apache.flink.graph.types.valuearray;
 
 import org.apache.flink.api.common.ExecutionConfig;
-import org.apache.flink.api.java.typeutils.runtime.CopyableValueComparator;
+
 import org.junit.Test;
 
 import static org.apache.flink.graph.types.valuearray.ValueArrayTypeInfo.INT_VALUE_ARRAY_TYPE_INFO;
@@ -30,6 +30,9 @@ import static org.apache.flink.graph.types.valuearray.ValueArrayTypeInfo.NULL_VA
 import static org.apache.flink.graph.types.valuearray.ValueArrayTypeInfo.STRING_VALUE_ARRAY_TYPE_INFO;
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link ValueArrayTypeInfo}.
+ */
 public class ValueArrayTypeInfoTest {
 
 	private ExecutionConfig config = new ExecutionConfig();

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/utils/proxy/OptionalBooleanTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/utils/proxy/OptionalBooleanTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/utils/proxy/OptionalBooleanTest.java
index a771f1f..3d6bb7a 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/utils/proxy/OptionalBooleanTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/utils/proxy/OptionalBooleanTest.java
@@ -19,12 +19,17 @@
 package org.apache.flink.graph.utils.proxy;
 
 import org.apache.flink.graph.utils.proxy.OptionalBoolean.State;
+
 import org.junit.Before;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
+
+/**
+ * Tests for {@link OptionalBoolean}.
+ */
 public class OptionalBooleanTest {
 
 	private OptionalBoolean u;
@@ -61,7 +66,6 @@ public class OptionalBooleanTest {
 		// unset, conflicting
 		assertTrue(u.conflictsWith(c));
 
-
 		// false, unset
 		assertFalse(f.conflictsWith(u));
 
@@ -74,7 +78,6 @@ public class OptionalBooleanTest {
 		// false, conflicting
 		assertTrue(f.conflictsWith(c));
 
-
 		// true, unset
 		assertFalse(t.conflictsWith(u));
 
@@ -87,7 +90,6 @@ public class OptionalBooleanTest {
 		// true, conflicting
 		assertTrue(t.conflictsWith(c));
 
-
 		// conflicting, unset
 		assertTrue(c.conflictsWith(u));
 
@@ -123,7 +125,6 @@ public class OptionalBooleanTest {
 		assertEquals(State.CONFLICTING, u.getState());
 		u.unset();
 
-
 		// false, unset => false
 		f.mergeWith(u);
 		assertEquals(State.FALSE, f.getState());
@@ -142,7 +143,6 @@ public class OptionalBooleanTest {
 		assertEquals(State.CONFLICTING, f.getState());
 		f.set(false);
 
-
 		// true, unset => true
 		t.mergeWith(u);
 		assertEquals(State.TRUE, t.getState());
@@ -161,7 +161,6 @@ public class OptionalBooleanTest {
 		assertEquals(State.CONFLICTING, t.getState());
 		t.set(true);
 
-
 		// conflicting, unset => conflicting
 		c.mergeWith(u);
 		assertEquals(State.CONFLICTING, c.getState());


[10/15] flink git commit: [FLINK-6709] [gelly] Activate strict checkstyle for flink-gellies

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java
index 71baaa9..670cefb 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Graph.java
@@ -78,7 +78,7 @@ import java.util.NoSuchElementException;
  *
  * @see org.apache.flink.graph.Edge
  * @see org.apache.flink.graph.Vertex
- * 
+ *
  * @param <K> the key type for edge and vertex identifiers
  * @param <VV> the value type for vertices
  * @param <EV> the value type for edges
@@ -91,8 +91,8 @@ public class Graph<K, VV, EV> {
 	private final DataSet<Edge<K, EV>> edges;
 
 	/**
-	 * Creates a graph from two DataSets: vertices and edges
-	 * 
+	 * Creates a graph from two DataSets: vertices and edges.
+	 *
 	 * @param vertices a DataSet of vertices.
 	 * @param edges a DataSet of edges.
 	 * @param context the flink execution environment.
@@ -105,7 +105,7 @@ public class Graph<K, VV, EV> {
 
 	/**
 	 * Creates a graph from a Collection of vertices and a Collection of edges.
-	 * 
+	 *
 	 * @param vertices a Collection of vertices.
 	 * @param edges a Collection of edges.
 	 * @param context the flink execution environment.
@@ -122,7 +122,7 @@ public class Graph<K, VV, EV> {
 	 * Creates a graph from a Collection of edges.
 	 * Vertices are created automatically and their values are set to
 	 * NullValue.
-	 * 
+	 *
 	 * @param edges a Collection of edges.
 	 * @param context the flink execution environment.
 	 * @return the newly created graph.
@@ -135,12 +135,12 @@ public class Graph<K, VV, EV> {
 
 	/**
 	 * Creates a graph from a Collection of edges.
-	 * Vertices are created automatically and their values are set 
+	 * Vertices are created automatically and their values are set
 	 * by applying the provided map function to the vertex IDs.
-	 * 
+	 *
 	 * @param edges a Collection of edges.
 	 * @param vertexValueInitializer a map function that initializes the vertex values.
-	 * It allows to apply a map transformation on the vertex ID to produce an initial vertex value. 
+	 * It allows to apply a map transformation on the vertex ID to produce an initial vertex value.
 	 * @param context the flink execution environment.
 	 * @return the newly created graph.
 	 */
@@ -152,7 +152,7 @@ public class Graph<K, VV, EV> {
 
 	/**
 	 * Creates a graph from a DataSet of vertices and a DataSet of edges.
-	 * 
+	 *
 	 * @param vertices a DataSet of vertices.
 	 * @param edges a DataSet of edges.
 	 * @param context the flink execution environment.
@@ -168,7 +168,7 @@ public class Graph<K, VV, EV> {
 	 * Creates a graph from a DataSet of edges.
 	 * Vertices are created automatically and their values are set to
 	 * NullValue.
-	 * 
+	 *
 	 * @param edges a DataSet of edges.
 	 * @param context the flink execution environment.
 	 * @return the newly created graph.
@@ -201,7 +201,7 @@ public class Graph<K, VV, EV> {
 	 * Creates a graph from a DataSet of edges.
 	 * Vertices are created automatically and their values are set
 	 * by applying the provided map function to the vertex IDs.
-	 * 
+	 *
 	 * @param edges a DataSet of edges.
 	 * @param vertexValueInitializer the mapper function that initializes the vertex values.
 	 * It allows to apply a map transformation on the vertex ID to produce an initial vertex value.
@@ -251,15 +251,15 @@ public class Graph<K, VV, EV> {
 	}
 
 	/**
-	 * Creates a graph from a DataSet of Tuple2 objects for vertices and 
+	 * Creates a graph from a DataSet of Tuple2 objects for vertices and
 	 * Tuple3 objects for edges.
-	 * <p>
-	 * The first field of the Tuple2 vertex object will become the vertex ID
+	 *
+	 * <p>The first field of the Tuple2 vertex object will become the vertex ID
 	 * and the second field will become the vertex value.
 	 * The first field of the Tuple3 object for edges will become the source ID,
 	 * the second field will become the target ID, and the third field will become
 	 * the edge value.
-	 * 
+	 *
 	 * @param vertices a DataSet of Tuple2 representing the vertices.
 	 * @param edges a DataSet of Tuple3 representing the edges.
 	 * @param context the flink execution environment.
@@ -281,13 +281,13 @@ public class Graph<K, VV, EV> {
 
 	/**
 	 * Creates a graph from a DataSet of Tuple3 objects for edges.
-	 * <p>
-	 * The first field of the Tuple3 object will become the source ID,
+	 *
+	 * <p>The first field of the Tuple3 object will become the source ID,
 	 * the second field will become the target ID, and the third field will become
 	 * the edge value.
-	 * <p>
-	 * Vertices are created automatically and their values are set to NullValue.
-	 * 
+	 *
+	 * <p>Vertices are created automatically and their values are set to NullValue.
+	 *
 	 * @param edges a DataSet of Tuple3 representing the edges.
 	 * @param context the flink execution environment.
 	 * @return the newly created graph.
@@ -304,14 +304,14 @@ public class Graph<K, VV, EV> {
 
 	/**
 	 * Creates a graph from a DataSet of Tuple3 objects for edges.
-	 * <p>
-	 * Each Tuple3 will become one Edge, where the source ID will be the first field of the Tuple2,
+	 *
+	 * <p>Each Tuple3 will become one Edge, where the source ID will be the first field of the Tuple3,
 	 * the target ID will be the second field of the Tuple3
 	 * and the Edge value will be the third field of the Tuple3.
-	 * <p>
-	 * Vertices are created automatically and their values are initialized
+	 *
+	 * <p>Vertices are created automatically and their values are initialized
 	 * by applying the provided vertexValueInitializer map function to the vertex IDs.
-	 * 
+	 *
 	 * @param edges a DataSet of Tuple3.
 	 * @param vertexValueInitializer the mapper function that initializes the vertex values.
 	 * It allows to apply a map transformation on the vertex ID to produce an initial vertex value.
@@ -332,9 +332,9 @@ public class Graph<K, VV, EV> {
 	 * Creates a graph from a DataSet of Tuple2 objects for edges.
 	 * Each Tuple2 will become one Edge, where the source ID will be the first field of the Tuple2
 	 * and the target ID will be the second field of the Tuple2.
-	 * <p>
-	 * Edge value types and Vertex values types will be set to NullValue.
-	 * 
+	 *
+	 * <p>Edge value types and Vertex value types will be set to NullValue.
+	 *
 	 * @param edges a DataSet of Tuple2.
 	 * @param context the flink execution environment.
 	 * @return the newly created graph.
@@ -353,10 +353,10 @@ public class Graph<K, VV, EV> {
 	 * Creates a graph from a DataSet of Tuple2 objects for edges.
 	 * Each Tuple2 will become one Edge, where the source ID will be the first field of the Tuple2
 	 * and the target ID will be the second field of the Tuple2.
-	 * <p>
-	 * Edge value types will be set to NullValue.
+	 *
+	 * <p>Edge value types will be set to NullValue.
 	 * Vertex values can be initialized by applying a user-defined map function on the vertex IDs.
-	 * 
+	 *
 	 * @param edges a DataSet of Tuple2, where the first field corresponds to the source ID
 	 * and the second field corresponds to the target ID.
 	 * @param vertexValueInitializer the mapper function that initializes the vertex values.
@@ -376,13 +376,13 @@ public class Graph<K, VV, EV> {
 
 	/**
 	* Creates a Graph from a CSV file of vertices and a CSV file of edges.
-	* 
+	*
 	* @param verticesPath path to a CSV file with the Vertex data.
 	* @param edgesPath path to a CSV file with the Edge data
 	* @param context the Flink execution environment.
-	* @return An instance of {@link org.apache.flink.graph.GraphCsvReader}, 
+	* @return An instance of {@link org.apache.flink.graph.GraphCsvReader},
 	* on which calling methods to specify types of the Vertex ID, Vertex value and Edge value returns a Graph.
-	* 
+	*
 	* @see org.apache.flink.graph.GraphCsvReader#types(Class, Class, Class)
 	* @see org.apache.flink.graph.GraphCsvReader#vertexTypes(Class, Class)
 	* @see org.apache.flink.graph.GraphCsvReader#edgeTypes(Class, Class)
@@ -392,14 +392,14 @@ public class Graph<K, VV, EV> {
 		return new GraphCsvReader(verticesPath, edgesPath, context);
 	}
 
-	/** 
+	/**
 	* Creates a graph from a CSV file of edges. Vertices will be created automatically.
 	*
 	* @param edgesPath a path to a CSV file with the Edges data
 	* @param context the execution environment.
 	* @return An instance of {@link org.apache.flink.graph.GraphCsvReader},
 	* on which calling methods to specify types of the Vertex ID, Vertex value and Edge value returns a Graph.
-	* 
+	*
 	* @see org.apache.flink.graph.GraphCsvReader#types(Class, Class, Class)
 	* @see org.apache.flink.graph.GraphCsvReader#vertexTypes(Class, Class)
 	* @see org.apache.flink.graph.GraphCsvReader#edgeTypes(Class, Class)
@@ -409,7 +409,7 @@ public class Graph<K, VV, EV> {
 		return new GraphCsvReader(edgesPath, context);
 	}
 
-	/** 
+	/**
 	 * Creates a graph from a CSV file of edges. Vertices will be created automatically and
 	 * Vertex values can be initialized using a user-defined mapper.
 	 *
@@ -419,7 +419,7 @@ public class Graph<K, VV, EV> {
 	 * @param context the execution environment.
 	 * @return An instance of {@link org.apache.flink.graph.GraphCsvReader},
 	 * on which calling methods to specify types of the Vertex ID, Vertex Value and Edge value returns a Graph.
-	 * 
+	 *
 	 * @see org.apache.flink.graph.GraphCsvReader#types(Class, Class, Class)
 	 * @see org.apache.flink.graph.GraphCsvReader#vertexTypes(Class, Class)
 	 * @see org.apache.flink.graph.GraphCsvReader#edgeTypes(Class, Class)
@@ -440,7 +440,7 @@ public class Graph<K, VV, EV> {
 	/**
 	 * Function that checks whether a Graph is a valid Graph,
 	 * as defined by the given {@link GraphValidator}.
-	 * 
+	 *
 	 * @return true if the Graph is valid.
 	 */
 	public Boolean validate(GraphValidator<K, VV, EV> validator) throws Exception {
@@ -520,7 +520,7 @@ public class Graph<K, VV, EV> {
 
 	/**
 	 * Apply a function to the attribute of each vertex in the graph.
-	 * 
+	 *
 	 * @param mapper the map function to apply.
 	 * @return a new graph
 	 */
@@ -570,7 +570,7 @@ public class Graph<K, VV, EV> {
 
 	/**
 	 * Apply a function to the attribute of each edge in the graph.
-	 * 
+	 *
 	 * @param mapper the map function to apply.
 	 * @return a new graph
 	 */
@@ -659,19 +659,19 @@ public class Graph<K, VV, EV> {
 	 * Joins the vertex DataSet of this graph with an input Tuple2 DataSet and applies
 	 * a user-defined transformation on the values of the matched records.
 	 * The vertex ID and the first field of the Tuple2 DataSet are used as the join keys.
-	 * 
+	 *
 	 * @param inputDataSet the Tuple2 DataSet to join with.
 	 * The first field of the Tuple2 is used as the join key and the second field is passed
-	 * as a parameter to the transformation function. 
+	 * as a parameter to the transformation function.
 	 * @param vertexJoinFunction the transformation function to apply.
 	 * The first parameter is the current vertex value and the second parameter is the value
 	 * of the matched Tuple2 from the input DataSet.
 	 * @return a new Graph, where the vertex values have been updated according to the
 	 * result of the vertexJoinFunction.
-	 * 
+	 *
 	 * @param <T> the type of the second field of the input Tuple2 DataSet.
 	*/
-	public <T> Graph<K, VV, EV> joinWithVertices(DataSet<Tuple2<K, T>> inputDataSet, 
+	public <T> Graph<K, VV, EV> joinWithVertices(DataSet<Tuple2<K, T>> inputDataSet,
 			final VertexJoinFunction<VV, T> vertexJoinFunction) {
 
 		DataSet<Vertex<K, VV>> resultedVertices = this.getVertices()
@@ -714,10 +714,10 @@ public class Graph<K, VV, EV> {
 	 * Joins the edge DataSet with an input DataSet on the composite key of both
 	 * source and target IDs and applies a user-defined transformation on the values
 	 * of the matched records. The first two fields of the input DataSet are used as join keys.
-	 * 
+	 *
 	 * @param inputDataSet the DataSet to join with.
 	 * The first two fields of the Tuple3 are used as the composite join key
-	 * and the third field is passed as a parameter to the transformation function. 
+	 * and the third field is passed as a parameter to the transformation function.
 	 * @param edgeJoinFunction the transformation function to apply.
 	 * The first parameter is the current edge value and the second parameter is the value
 	 * of the matched Tuple3 from the input DataSet.
@@ -771,10 +771,10 @@ public class Graph<K, VV, EV> {
 	 * Joins the edge DataSet with an input Tuple2 DataSet and applies a user-defined transformation
 	 * on the values of the matched records.
 	 * The source ID of the edges input and the first field of the input DataSet are used as join keys.
-	 * 
+	 *
 	 * @param inputDataSet the DataSet to join with.
 	 * The first field of the Tuple2 is used as the join key
-	 * and the second field is passed as a parameter to the transformation function. 
+	 * and the second field is passed as a parameter to the transformation function.
 	 * @param edgeJoinFunction the transformation function to apply.
 	 * The first parameter is the current edge value and the second parameter is the value
 	 * of the matched Tuple2 from the input DataSet.
@@ -834,10 +834,10 @@ public class Graph<K, VV, EV> {
 	 * Joins the edge DataSet with an input Tuple2 DataSet and applies a user-defined transformation
 	 * on the values of the matched records.
 	 * The target ID of the edges input and the first field of the input DataSet are used as join keys.
-	 * 
+	 *
 	 * @param inputDataSet the DataSet to join with.
 	 * The first field of the Tuple2 is used as the join key
-	 * and the second field is passed as a parameter to the transformation function. 
+	 * and the second field is passed as a parameter to the transformation function.
 	 * @param edgeJoinFunction the transformation function to apply.
 	 * The first parameter is the current edge value and the second parameter is the value
 	 * of the matched Tuple2 from the input DataSet.
@@ -859,7 +859,7 @@ public class Graph<K, VV, EV> {
 	/**
 	 * Apply filtering functions to the graph and return a sub-graph that
 	 * satisfies the predicates for both vertices and edges.
-	 * 
+	 *
 	 * @param vertexFilter the filter function for vertices.
 	 * @param edgeFilter the filter function for edges.
 	 * @return the resulting sub-graph.
@@ -881,7 +881,7 @@ public class Graph<K, VV, EV> {
 	/**
 	 * Apply a filtering function to the graph and return a sub-graph that
 	 * satisfies the predicates only for the vertices.
-	 * 
+	 *
 	 * @param vertexFilter the filter function for vertices.
 	 * @return the resulting sub-graph.
 	 */
@@ -900,7 +900,7 @@ public class Graph<K, VV, EV> {
 	/**
 	 * Apply a filtering function to the graph and return a sub-graph that
 	 * satisfies the predicates only for the edges.
-	 * 
+	 *
 	 * @param edgeFilter the filter function for edges.
 	 * @return the resulting sub-graph.
 	 */
@@ -919,8 +919,8 @@ public class Graph<K, VV, EV> {
 	}
 
 	/**
-	 * Return the out-degree of all vertices in the graph
-	 * 
+	 * Return the out-degree of all vertices in the graph.
+	 *
 	 * @return A DataSet of {@code Tuple2<vertexId, outDegree>}
 	 */
 	public DataSet<Tuple2<K, LongValue>> outDegrees() {
@@ -946,7 +946,7 @@ public class Graph<K, VV, EV> {
 
 			Iterator<Vertex<K, VV>> vertexIterator = vertex.iterator();
 
-			if(vertexIterator.hasNext()) {
+			if (vertexIterator.hasNext()) {
 				vertexDegree.f0 = vertexIterator.next().f0;
 				out.collect(vertexDegree);
 			} else {
@@ -956,8 +956,8 @@ public class Graph<K, VV, EV> {
 	}
 
 	/**
-	 * Return the in-degree of all vertices in the graph
-	 * 
+	 * Return the in-degree of all vertices in the graph.
+	 *
 	 * @return A DataSet of {@code Tuple2<vertexId, inDegree>}
 	 */
 	public DataSet<Tuple2<K, LongValue>> inDegrees() {
@@ -967,8 +967,8 @@ public class Graph<K, VV, EV> {
 	}
 
 	/**
-	 * Return the degree of all vertices in the graph
-	 * 
+	 * Return the degree of all vertices in the graph.
+	 *
 	 * @return A DataSet of {@code Tuple2<vertexId, degree>}
 	 */
 	public DataSet<Tuple2<K, LongValue>> getDegrees() {
@@ -979,7 +979,7 @@ public class Graph<K, VV, EV> {
 
 	/**
 	 * This operation adds all inverse-direction edges to the graph.
-	 * 
+	 *
 	 * @return the undirected graph.
 	 */
 	public Graph<K, VV, EV> getUndirected() {
@@ -993,10 +993,10 @@ public class Graph<K, VV, EV> {
 	 * Groups by vertex and computes a GroupReduce transformation over the edge values of each vertex.
 	 * The edgesFunction applied on the edges has access to both the id and the value
 	 * of the grouping vertex.
-	 * 
-	 * For each vertex, the edgesFunction can iterate over all edges of this vertex
+	 *
+	 * <p>For each vertex, the edgesFunction can iterate over all edges of this vertex
 	 * with the specified direction, and emit any number of output elements, including none.
-	 * 
+	 *
 	 * @param edgesFunction the group reduce function to apply to the neighboring edges of each vertex.
 	 * @param direction the edge direction (in-, out-, all-).
 	 * @param <T> the output type
@@ -1027,10 +1027,10 @@ public class Graph<K, VV, EV> {
 	 * Groups by vertex and computes a GroupReduce transformation over the edge values of each vertex.
 	 * The edgesFunction applied on the edges has access to both the id and the value
 	 * of the grouping vertex.
-	 * 
-	 * For each vertex, the edgesFunction can iterate over all edges of this vertex
+	 *
+	 * <p>For each vertex, the edgesFunction can iterate over all edges of this vertex
 	 * with the specified direction, and emit any number of output elements, including none.
-	 * 
+	 *
 	 * @param edgesFunction the group reduce function to apply to the neighboring edges of each vertex.
 	 * @param direction the edge direction (in-, out-, all-).
 	 * @param <T> the output type
@@ -1064,10 +1064,10 @@ public class Graph<K, VV, EV> {
 	 * Groups by vertex and computes a GroupReduce transformation over the edge values of each vertex.
 	 * The edgesFunction applied on the edges only has access to the vertex id (not the vertex value)
 	 * of the grouping vertex.
-	 * 
-	 * For each vertex, the edgesFunction can iterate over all edges of this vertex
+	 *
+	 * <p>For each vertex, the edgesFunction can iterate over all edges of this vertex
 	 * with the specified direction, and emit any number of output elements, including none.
-	 * 
+	 *
 	 * @param edgesFunction the group reduce function to apply to the neighboring edges of each vertex.
 	 * @param direction the edge direction (in-, out-, all-).
 	 * @param <T> the output type
@@ -1089,10 +1089,10 @@ public class Graph<K, VV, EV> {
 	 * Groups by vertex and computes a GroupReduce transformation over the edge values of each vertex.
 	 * The edgesFunction applied on the edges only has access to the vertex id (not the vertex value)
 	 * of the grouping vertex.
-	 * 
-	 * For each vertex, the edgesFunction can iterate over all edges of this vertex
+	 *
+	 * <p>For each vertex, the edgesFunction can iterate over all edges of this vertex
 	 * with the specified direction, and emit any number of output elements, including none.
-	 * 
+	 *
 	 * @param edgesFunction the group reduce function to apply to the neighboring edges of each vertex.
 	 * @param direction the edge direction (in-, out-, all-).
 	 * @param <T> the output type
@@ -1207,12 +1207,13 @@ public class Graph<K, VV, EV> {
 
 			Iterator<Vertex<K, VV>> vertexIterator = vertex.iterator();
 
-			if(vertexIterator.hasNext()) {
+			if (vertexIterator.hasNext()) {
 				function.iterateEdges(vertexIterator.next(), edges, out);
 			} else {
 				throw new NoSuchElementException("The edge src/trg id could not be found within the vertexIds");
 			}
 		}
+
 		@Override
 		public TypeInformation<T> getProducedType() {
 			return TypeExtractor.createTypeInfo(EdgesFunctionWithVertexValue.class, function.getClass(), 3,
@@ -1260,7 +1261,7 @@ public class Graph<K, VV, EV> {
 
 			Iterator<Vertex<K, VV>> vertexIterator = vertex.iterator();
 
-			if(vertexIterator.hasNext()) {
+			if (vertexIterator.hasNext()) {
 				function.iterateEdges(vertexIterator.next(), edgesIterable, out);
 			} else {
 				throw new NoSuchElementException("The edge src/trg id could not be found within the vertexIds");
@@ -1299,8 +1300,8 @@ public class Graph<K, VV, EV> {
 	}
 
 	/**
-	 * Reverse the direction of the edges in the graph
-	 * 
+	 * Reverse the direction of the edges in the graph.
+	 *
 	 * @return a new graph with all edges reversed
 	 * @throws UnsupportedOperationException
 	 */
@@ -1357,7 +1358,7 @@ public class Graph<K, VV, EV> {
 	/**
 	 * Adds the input vertex to the graph. If the vertex already
 	 * exists in the graph, it will not be added again.
-	 * 
+	 *
 	 * @param vertex the vertex to be added
 	 * @return the new graph containing the existing vertices as well as the one just added
 	 */
@@ -1404,7 +1405,7 @@ public class Graph<K, VV, EV> {
 	/**
 	 * Adds the given edge to the graph. If the source and target vertices do
 	 * not exist in the graph, they will also be added.
-	 * 
+	 *
 	 * @param source the source vertex of the edge
 	 * @param target the target vertex of the edge
 	 * @param edgeValue the edge value
@@ -1421,7 +1422,7 @@ public class Graph<K, VV, EV> {
 	/**
 	 * Adds the given list edges to the graph.
 	 *
-	 * When adding an edge for a non-existing set of vertices, the edge is considered invalid and ignored.
+	 * <p>When adding an edge for a non-existing set of vertices, the edge is considered invalid and ignored.
 	 *
 	 * @param newEdges the data set of edges to be added
 	 * @return a new graph containing the existing edges plus the newly added edges.
@@ -1461,7 +1462,7 @@ public class Graph<K, VV, EV> {
 
 	/**
 	 * Removes the given vertex and its edges from the graph.
-	 * 
+	 *
 	 * @param vertex the vertex to remove
 	 * @return the new graph containing the existing vertices and edges without
 	 *         the removed vertex and its edges
@@ -1481,8 +1482,7 @@ public class Graph<K, VV, EV> {
 	 * 		   and edges removed.
 	 */
 
-	public Graph<K, VV, EV> removeVertices(List<Vertex<K, VV>> verticesToBeRemoved)
-	{
+	public Graph<K, VV, EV> removeVertices(List<Vertex<K, VV>> verticesToBeRemoved) {
 		return removeVertices(this.context.fromCollection(verticesToBeRemoved));
 	}
 
@@ -1537,7 +1537,7 @@ public class Graph<K, VV, EV> {
 
 	 /**
 	 * Removes all edges that match the given edge from the graph.
-	 * 
+	 *
 	 * @param edge the edge to remove
 	 * @return the new graph containing the existing vertices and edges without
 	 *         the removed edges
@@ -1592,7 +1592,7 @@ public class Graph<K, VV, EV> {
 	/**
 	 * Performs union on the vertices and edges sets of the input graphs
 	 * removing duplicate vertices but maintaining duplicate edges.
-	 * 
+	 *
 	 * @param graph the graph to perform union with
 	 * @return a new graph
 	 */
@@ -1616,7 +1616,7 @@ public class Graph<K, VV, EV> {
 	 * Performs Difference on the vertex and edge sets of the input graphs
 	 * removes common vertices and edges. If a source/target vertex is removed,
 	 * its corresponding edge will also be removed
-	 * 
+	 *
 	 * @param graph the graph to perform difference with
 	 * @return a new graph where the common vertices and edges have been removed
 	 */
@@ -1628,15 +1628,15 @@ public class Graph<K, VV, EV> {
 	/**
 	 * Performs intersect on the edge sets of the input graphs. Edges are considered equal, if they
 	 * have the same source identifier, target identifier and edge value.
-	 * <p>
-	 * The method computes pairs of equal edges from the input graphs. If the same edge occurs
+	 *
+	 * <p>The method computes pairs of equal edges from the input graphs. If the same edge occurs
 	 * multiple times in the input graphs, there will be multiple edge pairs to be considered. Each
 	 * edge instance can only be part of one pair. If the given parameter {@code distinctEdges} is set
 	 * to {@code true}, there will be exactly one edge in the output graph representing all pairs of
 	 * equal edges. If the parameter is set to {@code false}, both edges of each pair will be in the
 	 * output.
-	 * <p>
-	 * Vertices in the output graph will have no vertex values.
+	 *
+	 * <p>Vertices in the output graph will have no vertex values.
 	 *
 	 * @param graph the graph to perform intersect with
 	 * @param distinctEdges if set to {@code true}, there will be exactly one edge in the output graph
@@ -1709,7 +1709,7 @@ public class Graph<K, VV, EV> {
 			Iterator<Edge<K, EV>> rightIt = edgesRight.iterator();
 
 			// collect pairs once
-			while(leftIt.hasNext() && rightIt.hasNext()) {
+			while (leftIt.hasNext() && rightIt.hasNext()) {
 				out.collect(leftIt.next());
 				out.collect(rightIt.next());
 			}
@@ -1723,7 +1723,7 @@ public class Graph<K, VV, EV> {
 	 * @param scatterFunction the scatter function
 	 * @param gatherFunction the gather function
 	 * @param maximumNumberOfIterations maximum number of iterations to perform
-	 * 
+	 *
 	 * @return the updated Graph after the scatter-gather iteration has converged or
 	 * after maximumNumberOfIterations.
 	 */
@@ -1743,7 +1743,7 @@ public class Graph<K, VV, EV> {
 	 * @param gatherFunction the gather function
 	 * @param maximumNumberOfIterations maximum number of iterations to perform
 	 * @param parameters the iteration configuration parameters
-	 * 
+	 *
 	 * @return the updated Graph after the scatter-gather iteration has converged or
 	 * after maximumNumberOfIterations.
 	 */
@@ -1818,12 +1818,12 @@ public class Graph<K, VV, EV> {
 	 * @param computeFunction the vertex compute function
 	 * @param combiner an optional message combiner
 	 * @param maximumNumberOfIterations maximum number of iterations to perform
-	 * 
+	 *
 	 * @return the updated Graph after the vertex-centric iteration has converged or
 	 * after maximumNumberOfIterations.
 	 */
 	public <M> Graph<K, VV, EV> runVertexCentricIteration(
-			ComputeFunction<K, VV, EV, M> computeFunction, 
+			ComputeFunction<K, VV, EV, M> computeFunction,
 			MessageCombiner<K, M> combiner, int maximumNumberOfIterations) {
 
 		return this.runVertexCentricIteration(computeFunction, combiner, maximumNumberOfIterations, null);
@@ -1831,12 +1831,12 @@ public class Graph<K, VV, EV> {
 
 	/**
 	 * Runs a {@link VertexCentricIteration} on the graph with configuration options.
-	 * 
+	 *
 	 * @param computeFunction the vertex compute function
 	 * @param combiner an optional message combiner
 	 * @param maximumNumberOfIterations maximum number of iterations to perform
 	 * @param parameters the {@link VertexCentricConfiguration} parameters
-	 * 
+	 *
 	 * @return the updated Graph after the vertex-centric iteration has converged or
 	 * after maximumNumberOfIterations.
 	 */
@@ -1881,10 +1881,10 @@ public class Graph<K, VV, EV> {
 	 * Groups by vertex and computes a GroupReduce transformation over the neighbors (both edges and vertices)
 	 * of each vertex. The neighborsFunction applied on the neighbors only has access to both the vertex id
 	 * and the vertex value of the grouping vertex.
-	 * 
-	 * For each vertex, the neighborsFunction can iterate over all neighbors of this vertex
+	 *
+	 * <p>For each vertex, the neighborsFunction can iterate over all neighbors of this vertex
 	 * with the specified direction, and emit any number of output elements, including none.
-	 * 
+	 *
 	 * @param neighborsFunction the group reduce function to apply to the neighboring edges and vertices
 	 * of each vertex.
 	 * @param direction the edge direction (in-, out-, all-).
@@ -1928,10 +1928,10 @@ public class Graph<K, VV, EV> {
 	 * Groups by vertex and computes a GroupReduce transformation over the neighbors (both edges and vertices)
 	 * of each vertex. The neighborsFunction applied on the neighbors only has access to both the vertex id
 	 * and the vertex value of the grouping vertex.
-	 * 
-	 * For each vertex, the neighborsFunction can iterate over all neighbors of this vertex
+	 *
+	 * <p>For each vertex, the neighborsFunction can iterate over all neighbors of this vertex
 	 * with the specified direction, and emit any number of output elements, including none.
-	 * 
+	 *
 	 * @param neighborsFunction the group reduce function to apply to the neighboring edges and vertices
 	 * of each vertex.
 	 * @param direction the edge direction (in-, out-, all-).
@@ -1979,10 +1979,10 @@ public class Graph<K, VV, EV> {
 	 * Groups by vertex and computes a GroupReduce transformation over the neighbors (both edges and vertices)
 	 * of each vertex. The neighborsFunction applied on the neighbors only has access to the vertex id
 	 * (not the vertex value) of the grouping vertex.
-	 * 
-	 * For each vertex, the neighborsFunction can iterate over all neighbors of this vertex
+	 *
+	 * <p>For each vertex, the neighborsFunction can iterate over all neighbors of this vertex
 	 * with the specified direction, and emit any number of output elements, including none.
-	 * 
+	 *
 	 * @param neighborsFunction the group reduce function to apply to the neighboring edges and vertices
 	 * of each vertex.
 	 * @param direction the edge direction (in-, out-, all-).
@@ -2027,10 +2027,10 @@ public class Graph<K, VV, EV> {
 	 * Groups by vertex and computes a GroupReduce transformation over the neighbors (both edges and vertices)
 	 * of each vertex. The neighborsFunction applied on the neighbors only has access to the vertex id
 	 * (not the vertex value) of the grouping vertex.
-	 * 
-	 * For each vertex, the neighborsFunction can iterate over all neighbors of this vertex
+	 *
+	 * <p>For each vertex, the neighborsFunction can iterate over all neighbors of this vertex
 	 * with the specified direction, and emit any number of output elements, including none.
-	 * 
+	 *
 	 * @param neighborsFunction the group reduce function to apply to the neighboring edges and vertices
 	 * of each vertex.
 	 * @param direction the edge direction (in-, out-, all-).
@@ -2105,7 +2105,7 @@ public class Graph<K, VV, EV> {
 		}
 
 		@SuppressWarnings("unchecked")
-		public void join(Edge<K, EV> edge, Vertex<K, VV> otherVertex, 
+		public void join(Edge<K, EV> edge, Vertex<K, VV> otherVertex,
 				Collector<Tuple2<K, VV>> out) {
 			out.collect(new Tuple2<>((K) edge.getField(fieldPosition), otherVertex.getValue()));
 		}
@@ -2180,7 +2180,7 @@ public class Graph<K, VV, EV> {
 		}
 
 		public void coGroup(Iterable<Vertex<K, VV>> vertex,
-				final Iterable<Tuple3<K, Edge<K, EV>, Vertex<K, VV>>> keysWithNeighbors, 
+				final Iterable<Tuple3<K, Edge<K, EV>, Vertex<K, VV>>> keysWithNeighbors,
 				Collector<T> out) throws Exception {
 
 			final Iterator<Tuple2<Edge<K, EV>, Vertex<K, VV>>> neighborsIterator = new Iterator<Tuple2<Edge<K, EV>, Vertex<K, VV>>>() {

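For orientation, here is a brief, illustrative sketch (not taken from this commit) of the Graph API whose javadoc is cleaned up above: it builds a graph from an edge DataSet, initializing vertex values from the vertex IDs as described for fromDataSet, and computes the out-degrees. The data values are placeholders; the later sketches in this mail assume the same imports plus the classes they mention.

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.Graph;
import org.apache.flink.types.LongValue;

ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

DataSet<Edge<Long, Double>> edges = env.fromElements(
	new Edge<>(1L, 2L, 0.5),
	new Edge<>(2L, 3L, 1.5));

// Vertices are created automatically; values are initialized from the vertex IDs.
Graph<Long, Long, Double> graph = Graph.fromDataSet(edges,
	new MapFunction<Long, Long>() {
		@Override
		public Long map(Long id) {
			return id;
		}
	}, env);

// A DataSet of Tuple2<vertexId, outDegree>, as documented above.
DataSet<Tuple2<Long, LongValue>> outDegrees = graph.outDegrees();
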
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/GraphAnalytic.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/GraphAnalytic.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/GraphAnalytic.java
index e72b853..46875b6 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/GraphAnalytic.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/GraphAnalytic.java
@@ -35,7 +35,7 @@ import org.apache.flink.api.java.DataSet;
 public interface GraphAnalytic<K, VV, EV, T> {
 
 	/**
-	 * This method must be called after the program has executed:
+	 * This method must be called after the program has executed.
 	 *  1) "run" analytics and algorithms
 	 *  2) call ExecutionEnvironment.execute()
 	 *  3) get analytic results

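The three-step protocol this javadoc describes can be read as the following illustrative helper (hypothetical, not part of this commit); execute() covers steps 2 and 3 by triggering ExecutionEnvironment.execute() and then returning getResult().

// Hypothetical helper; any GraphAnalytic implementation fits here.
static <K, VV, EV, T> T runAndGet(GraphAnalytic<K, VV, EV, T> analytic,
		Graph<K, VV, EV> graph) throws Exception {
	analytic.run(graph);       // 1) wire the analytic into the program
	return analytic.execute(); // 2) execute the plan, 3) fetch the result
}
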
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/GraphAnalyticBase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/GraphAnalyticBase.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/GraphAnalyticBase.java
new file mode 100644
index 0000000..2e7c5b2
--- /dev/null
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/GraphAnalyticBase.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.graph;
+
+import org.apache.flink.api.java.ExecutionEnvironment;
+import org.apache.flink.util.Preconditions;
+
+/**
+ * Base class for {@link GraphAnalytic}.
+ *
+ * @param <K> key type
+ * @param <VV> vertex value type
+ * @param <EV> edge value type
+ * @param <T> the return type
+ */
+public abstract class GraphAnalyticBase<K, VV, EV, T>
+implements GraphAnalytic<K, VV, EV, T> {
+
+	protected ExecutionEnvironment env;
+
+	@Override
+	public GraphAnalytic<K, VV, EV, T> run(Graph<K, VV, EV> input)
+			throws Exception {
+		env = input.getContext();
+		return this;
+	}
+
+	@Override
+	public T execute()
+			throws Exception {
+		env.execute();
+		return getResult();
+	}
+
+	@Override
+	public T execute(String jobName)
+			throws Exception {
+		Preconditions.checkNotNull(jobName);
+
+		env.execute(jobName);
+		return getResult();
+	}
+}

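A hypothetical subclass illustrating what the new base class factors out; VertexCount is not a class in this commit, and a real analytic would gather its result through accumulators during the job rather than the eager numberOfVertices() call used here for brevity.

// Hypothetical example only.
public class VertexCount<K, VV, EV> extends GraphAnalyticBase<K, VV, EV, Long> {

	private long count;

	@Override
	public VertexCount<K, VV, EV> run(Graph<K, VV, EV> input) throws Exception {
		super.run(input); // captures the ExecutionEnvironment used by execute()
		count = input.numberOfVertices(); // eager and illustrative only
		return this;
	}

	@Override
	public Long getResult() {
		return count;
	}
}
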
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/GraphCsvReader.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/GraphCsvReader.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/GraphCsvReader.java
index 6547f9a..6f5570f 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/GraphCsvReader.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/GraphCsvReader.java
@@ -85,7 +85,6 @@ public class GraphCsvReader {
 				new Path(Preconditions.checkNotNull(edgePath, "The file path may not be null.")), context);
 	}
 
-
 	public <K, VV> GraphCsvReader(String edgePath, final MapFunction<K, VV> mapper, ExecutionEnvironment context) {
 			this(new Path(Preconditions.checkNotNull(edgePath, "The file path may not be null.")), mapper, context);
 	}
@@ -93,7 +92,7 @@ public class GraphCsvReader {
 	/**
 	 * Creates a Graph from CSV input with vertex values and edge values.
 	 * The vertex values are specified through a vertices input file or a user-defined map function.
-	 * 
+	 *
 	 * @param vertexKey the type of the vertex IDs
 	 * @param vertexValue the type of the vertex values
 	 * @param edgeValue the type of the edge values
@@ -226,7 +225,7 @@ public class GraphCsvReader {
 	 * @return The GraphCSVReader instance itself, to allow for fluent function chaining.
 	 */
 	public GraphCsvReader lineDelimiterVertices(String delimiter) {
-		if(this.vertexReader != null) {
+		if (this.vertexReader != null) {
 			this.vertexReader.lineDelimiter(delimiter);
 		}
 		return this;
@@ -240,7 +239,7 @@ public class GraphCsvReader {
 	 * @return The GraphCsv reader instance itself, to allow for fluent function chaining.
 	 */
 	public GraphCsvReader fieldDelimiterVertices(String delimiter) {
-		if(this.vertexReader != null) {
+		if (this.vertexReader != null) {
 			this.vertexReader.fieldDelimiter(delimiter);
 		}
 		return this;
@@ -280,7 +279,7 @@ public class GraphCsvReader {
 	 * @return The GraphCSVReader instance itself, to allow for fluent function chaining.
 	 */
 	public GraphCsvReader parseQuotedStringsVertices(char quoteCharacter) {
-		if(this.vertexReader != null) {
+		if (this.vertexReader != null) {
 			this.vertexReader.parseQuotedStrings(quoteCharacter);
 		}
 		return this;
@@ -295,7 +294,7 @@ public class GraphCsvReader {
 	 * @return The GraphCSVReader instance itself, to allow for fluent function chaining.
 	 */
 	public GraphCsvReader ignoreCommentsVertices(String commentPrefix) {
-		if(this.vertexReader != null) {
+		if (this.vertexReader != null) {
 			this.vertexReader.ignoreComments(commentPrefix);
 		}
 		return this;
@@ -327,7 +326,7 @@ public class GraphCsvReader {
 	 * @return The GraphCSVReader instance itself, to allow for fluent function chaining.
 	 */
 	public GraphCsvReader includeFieldsVertices(boolean ... vertexFields) {
-		if(this.vertexReader != null) {
+		if (this.vertexReader != null) {
 			this.vertexReader.includeFields(vertexFields);
 		}
 		return this;
@@ -364,7 +363,7 @@ public class GraphCsvReader {
 	 * @return The GraphCSVReader instance itself, to allow for fluent function chaining.
 	 */
 	public GraphCsvReader includeFieldsVertices(String mask) {
-		if(this.vertexReader != null) {
+		if (this.vertexReader != null) {
 			this.vertexReader.includeFields(mask);
 		}
 		return this;
@@ -396,8 +395,8 @@ public class GraphCsvReader {
 	 * non-zero bit.
 	 * The parser will skip over all fields where the character at the corresponding bit is zero, and
 	 * include the fields where the corresponding bit is one.
-	 * <p>
-	 * Examples:
+	 *
+	 * <p>Examples:
 	 * <ul>
 	 *   <li>A mask of {@code 0x7} would include the first three fields.</li>
 	 *   <li>A mask of {@code 0x26} (binary {@code 100110}) would skip the first field, include fields
@@ -408,7 +407,7 @@ public class GraphCsvReader {
 	 * @return The GraphCSVReader instance itself, to allow for fluent function chaining.
 	 */
 	public GraphCsvReader includeFieldsVertices(long mask) {
-		if(this.vertexReader != null) {
+		if (this.vertexReader != null) {
 			this.vertexReader.includeFields(mask);
 		}
 		return this;
@@ -422,8 +421,8 @@ public class GraphCsvReader {
 	 * non-zero bit.
 	 * The parser will skip over all fields where the character at the corresponding bit is zero, and
 	 * include the fields where the corresponding bit is one.
-	 * <p>
-	 * Examples:
+	 *
+	 * <p>Examples:
 	 * <ul>
 	 *   <li>A mask of {@code 0x7} would include the first three fields.</li>
 	 *   <li>A mask of {@code 0x26} (binary {@code 100110}) would skip the first field, include fields
@@ -454,7 +453,7 @@ public class GraphCsvReader {
 	 * @return The GraphCSVReader instance itself, to allow for fluent function chaining.
 	 */
 	public GraphCsvReader ignoreFirstLineVertices() {
-		if(this.vertexReader != null) {
+		if (this.vertexReader != null) {
 			this.vertexReader.ignoreFirstLine();
 		}
 		return this;
@@ -478,7 +477,7 @@ public class GraphCsvReader {
 	 * @return The GraphCSVReader instance itself, to allow for fluent function chaining.
 	 */
 	public GraphCsvReader ignoreInvalidLinesVertices() {
-		if(this.vertexReader != null) {
+		if (this.vertexReader != null) {
 			this.vertexReader.ignoreInvalidLines();
 		}
 		return this;

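For reference, the fluent reader configured by the methods above is used roughly as in this illustrative sketch; the paths are placeholders and env is an ExecutionEnvironment. The types(...) call fixes the vertex ID, vertex value and edge value types and returns the Graph.

Graph<Long, String, Double> graph = Graph
	.fromCsvReader("/path/to/vertices.csv", "/path/to/edges.csv", env)
	.lineDelimiterVertices("\n")
	.fieldDelimiterVertices(",")
	.ignoreCommentsVertices("#")
	.types(Long.class, String.class, Double.class);
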
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/IterationConfiguration.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/IterationConfiguration.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/IterationConfiguration.java
index 964d20e..85bc9ef 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/IterationConfiguration.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/IterationConfiguration.java
@@ -18,38 +18,38 @@
 
 package org.apache.flink.graph;
 
-import java.util.HashMap;
-import java.util.Map;
-
 import org.apache.flink.api.common.aggregators.Aggregator;
 import org.apache.flink.graph.spargel.GatherFunction;
 import org.apache.flink.util.Preconditions;
 
+import java.util.HashMap;
+import java.util.Map;
+
 /**
  * This is used as a base class for vertex-centric iteration or gather-sum-apply iteration configuration.
  */
 public abstract class IterationConfiguration {
 
-	/** the iteration name **/
+	// the iteration name
 	private String name;
 
-	/** the iteration parallelism **/
+	// the iteration parallelism
 	private int parallelism = -1;
 
-	/** the iteration aggregators **/
+	// the iteration aggregators
 	private Map<String, Aggregator<?>> aggregators = new HashMap<>();
 
-	/** flag that defines whether the solution set is kept in managed memory **/
+	// flag that defines whether the solution set is kept in managed memory
 	private boolean unmanagedSolutionSet = false;
 
-	/** flag that defines whether the number of vertices option is set **/
+	// flag that defines whether the number of vertices option is set
 	private boolean optNumVertices = false;
-	
+
 	public IterationConfiguration() {}
 
 	/**
 	 * Sets the name for the iteration. The name is displayed in logs and messages.
-	 * 
+	 *
 	 * @param name The name for the iteration.
 	 */
 	public void setName(String name) {
@@ -58,13 +58,13 @@ public abstract class IterationConfiguration {
 
 	/**
 	 * Gets the name of the iteration.
-	 * @param defaultName 
-	 * 
+	 * @param defaultName
+	 *
 	 * @return The name of the iteration.
 	 */
 	public String getName(String defaultName) {
 		if (name != null) {
-			return name;			
+			return name;
 		}
 		else {
 			return defaultName;
@@ -73,17 +73,17 @@ public abstract class IterationConfiguration {
 
 	/**
 	 * Sets the parallelism for the iteration.
-	 * 
+	 *
 	 * @param parallelism The parallelism.
 	 */
 	public void setParallelism(int parallelism) {
 		Preconditions.checkArgument(parallelism > 0 || parallelism == -1, "The parallelism must be positive, or -1 (use default).");
 		this.parallelism = parallelism;
 	}
-	
+
 	/**
 	 * Gets the iteration's parallelism.
-	 * 
+	 *
 	 * @return The iterations parallelism, or -1, if not set.
 	 */
 	public int getParallelism() {
@@ -94,18 +94,18 @@ public abstract class IterationConfiguration {
 	 * Defines whether the solution set is kept in managed memory (Flink's internal way of keeping object
 	 * in serialized form) or as a simple object map.
 	 * By default, the solution set runs in managed memory.
-	 * 
+	 *
 	 * @param unmanaged True, to keep the solution set in unmanaged memory, false otherwise.
 	 */
 	public void setSolutionSetUnmanagedMemory(boolean unmanaged) {
 		this.unmanagedSolutionSet = unmanaged;
 	}
-	
+
 	/**
 	 * Gets whether the solution set is kept in managed memory (Flink's internal way of keeping object
 	 * in serialized form) or as a simple object map.
 	 * By default, the solution set runs in managed memory.
-	 * 
+	 *
 	 * @return True, if the solution set is in unmanaged memory, false otherwise.
 	 */
 	public boolean isSolutionSetUnmanagedMemory() {
@@ -136,8 +136,8 @@ public abstract class IterationConfiguration {
 	 * Registers a new aggregator. Aggregators registered here are available during the execution of the vertex updates
 	 * via {@link GatherFunction#getIterationAggregator(String)} and
 	 * {@link GatherFunction#getPreviousIterationAggregate(String)}.
-	 * 
-	 * @param name The name of the aggregator, used to retrieve it and its aggregates during execution. 
+	 *
+	 * @param name The name of the aggregator, used to retrieve it and its aggregates during execution.
 	 * @param aggregator The aggregator.
 	 */
 	public void registerAggregator(String name, Aggregator<?> aggregator) {

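To tie this base class to the iteration entry points from the Graph.java hunks above, an illustrative sketch using ScatterGatherConfiguration, one of its concrete subclasses; scatterFunction, gatherFunction and graph are assumed to be defined elsewhere.

ScatterGatherConfiguration parameters = new ScatterGatherConfiguration();
parameters.setName("scatter-gather iteration"); // shown in logs and messages
parameters.setParallelism(16);
parameters.registerAggregator("sum", new LongSumAggregator());

Graph<Long, Double, Double> result = graph
	.runScatterGatherIteration(scatterFunction, gatherFunction, 20, parameters);
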
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/NeighborsFunction.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/NeighborsFunction.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/NeighborsFunction.java
index 1a32204..01ea9d6 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/NeighborsFunction.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/NeighborsFunction.java
@@ -18,12 +18,12 @@
 
 package org.apache.flink.graph;
 
-import java.io.Serializable;
-
 import org.apache.flink.api.common.functions.Function;
 import org.apache.flink.api.java.tuple.Tuple3;
 import org.apache.flink.util.Collector;
 
+import java.io.Serializable;
+
 /**
  * Interface to be implemented by the function applied to a vertex neighborhood
  * in the {@link Graph#groupReduceOnNeighbors(NeighborsFunction, EdgeDirection)}
@@ -39,16 +39,16 @@ public interface NeighborsFunction<K, VV, EV, O> extends Function, Serializable
 	/**
 	 * This method is called per vertex and can iterate over all of its neighbors
 	 * with the specified direction.
-	 * <p>
-	 * If called with {@link EdgeDirection#OUT} the group will contain
+	 *
+	 * <p>If called with {@link EdgeDirection#OUT} the group will contain
 	 * the out-edges and neighboring vertices of the grouping vertex.
 	 * If called with {@link EdgeDirection#IN} the group will contain
 	 * the in-edges and neighboring vertices of the grouping vertex.
 	 * If called with {@link EdgeDirection#ALL} the group will contain
 	 * all edges and neighboring vertices of the grouping vertex.
-	 * <p>
-	 * The method can emit any number of output elements, including none.
-	 * 
+	 *
+	 * <p>The method can emit any number of output elements, including none.
+	 *
 	 * @param neighbors the neighbors of the grouping vertex.
 	 * The first field of each Tuple3 is the ID of the grouping vertex.
 	 * The second field is the neighboring edge, and the third field is the neighboring vertex.

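As a concrete reading of this interface, an illustrative sketch that sums the neighboring vertex values over out-edges; the types are arbitrary and graph is assumed to be a Graph<Long, Long, Double>.

DataSet<Tuple2<Long, Long>> neighborSums = graph.groupReduceOnNeighbors(
	new NeighborsFunction<Long, Long, Double, Tuple2<Long, Long>>() {
		@Override
		public void iterateNeighbors(
				Iterable<Tuple3<Long, Edge<Long, Double>, Vertex<Long, Long>>> neighbors,
				Collector<Tuple2<Long, Long>> out) {
			long vertexId = -1;
			long sum = 0;
			for (Tuple3<Long, Edge<Long, Double>, Vertex<Long, Long>> neighbor : neighbors) {
				vertexId = neighbor.f0;        // ID of the grouping vertex
				sum += neighbor.f2.getValue(); // value of the neighboring vertex
			}
			out.collect(new Tuple2<>(vertexId, sum));
		}
	}, EdgeDirection.OUT);
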
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/NeighborsFunctionWithVertexValue.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/NeighborsFunctionWithVertexValue.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/NeighborsFunctionWithVertexValue.java
index 657238c..41aedb0 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/NeighborsFunctionWithVertexValue.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/NeighborsFunctionWithVertexValue.java
@@ -18,12 +18,12 @@
 
 package org.apache.flink.graph;
 
-import java.io.Serializable;
-
 import org.apache.flink.api.common.functions.Function;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.util.Collector;
 
+import java.io.Serializable;
+
 /**
  * Interface to be implemented by the function applied to a vertex neighborhood
  * in the {@link Graph#groupReduceOnNeighbors(NeighborsFunctionWithVertexValue, EdgeDirection)}
@@ -39,16 +39,16 @@ public interface NeighborsFunctionWithVertexValue<K, VV, EV, O> extends Function
 	/**
 	 * This method is called per vertex and can iterate over all of its neighbors
 	 * with the specified direction.
-	 * <p>
-	 * If called with {@link EdgeDirection#OUT} the group will contain
+	 *
+	 * <p>If called with {@link EdgeDirection#OUT} the group will contain
 	 * the out-edges and neighboring vertices of the grouping vertex.
 	 * If called with {@link EdgeDirection#IN} the group will contain
 	 * the in-edges and neighboring vertices of the grouping vertex.
 	 * If called with {@link EdgeDirection#ALL} the group will contain
 	 * all edges and neighboring vertices of the grouping vertex.
-	 * <p>
-	 * The method can emit any number of output elements, including none.
-	 * 
+	 *
+	 * <p>The method can emit any number of output elements, including none.
+	 *
 	 * @param vertex the grouping Vertex
 	 * @param neighbors the neighbors of the grouping vertex.
 	 * The first field of each Tuple3 is the ID of the grouping vertex.

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/ReduceEdgesFunction.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/ReduceEdgesFunction.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/ReduceEdgesFunction.java
index e7631a1..0cc6e72 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/ReduceEdgesFunction.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/ReduceEdgesFunction.java
@@ -34,7 +34,7 @@ public interface ReduceEdgesFunction<EV> extends Function, Serializable {
 	 * It combines two neighboring edge values into one new value of the same type.
 	 * For each vertex, this function is consecutively called,
 	 * until only a single value for each edge remains.
-	 * 
+	 *
 	 * @param firstEdgeValue the first neighboring edge value to combine
 	 * @param secondEdgeValue the second neighboring edge value to combine
 	 * @return the combined value of both input values

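An illustrative sketch of the combiner contract described above, keeping the minimum edge value per vertex via Graph#reduceOnEdges; graph is assumed to carry Double edge values.

DataSet<Tuple2<Long, Double>> minEdgeValues = graph.reduceOnEdges(
	new ReduceEdgesFunction<Double>() {
		@Override
		public Double reduceEdges(Double firstEdgeValue, Double secondEdgeValue) {
			// called repeatedly until a single value per vertex remains
			return Math.min(firstEdgeValue, secondEdgeValue);
		}
	}, EdgeDirection.OUT);
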
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/ReduceNeighborsFunction.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/ReduceNeighborsFunction.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/ReduceNeighborsFunction.java
index 5b423e2..cb4ee60 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/ReduceNeighborsFunction.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/ReduceNeighborsFunction.java
@@ -35,7 +35,7 @@ public interface ReduceNeighborsFunction <VV> extends Function, Serializable {
 	 * It combines two neighboring vertex values into one new value of the same type.
 	 * For each vertex, this function is consecutively called,
 	 * until only a single value for each vertex remains.
-	 * 
+	 *
 	 * @param firstNeighborValue the first neighboring vertex value to combine
 	 * @param secondNeighborValue the second neighboring vertex value to combine
 	 * @return the combined value of both input values

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Triplet.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Triplet.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Triplet.java
index 2ae0903..77a577f 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Triplet.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Triplet.java
@@ -35,7 +35,7 @@ public class Triplet <K, VV, EV> extends Tuple5<K, K, VV, VV, EV> {
 	public Triplet() {}
 
 	/**
-	 * Constructs a Triplet from a given source vertex, target vertex and edge
+	 * Constructs a Triplet from a given source vertex, target vertex, and edge.
 	 *
 	 * @param srcVertex
 	 * @param trgVertex

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/VertexJoinFunction.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/VertexJoinFunction.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/VertexJoinFunction.java
index f40dac9..aaa75f8 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/VertexJoinFunction.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/VertexJoinFunction.java
@@ -35,7 +35,7 @@ public interface VertexJoinFunction<VV, T> extends Function, Serializable {
 	/**
 	 * Applies a transformation on the current vertex value
 	 * and the value of the matched tuple of the input DataSet.
-	 * 
+	 *
 	 * @param vertexValue the current vertex value
 	 * @param inputValue the value of the matched Tuple2 input
 	 * @return the new vertex value

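Finally, an illustrative sketch tying this interface back to joinWithVertices from the Graph.java hunk above; env and graph are assumed to be in scope, the data values are placeholders, and the first tuple field is the join key.

DataSet<Tuple2<Long, Long>> input = env.fromElements(
	new Tuple2<>(1L, 10L),
	new Tuple2<>(2L, 20L));

Graph<Long, Long, Double> updated = graph.joinWithVertices(input,
	new VertexJoinFunction<Long, Long>() {
		@Override
		public Long vertexJoin(Long vertexValue, Long inputValue) {
			return vertexValue + inputValue; // the new vertex value for matched IDs
		}
	});
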
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/AbstractDataSetAnalytic.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/AbstractDataSetAnalytic.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/AbstractDataSetAnalytic.java
deleted file mode 100644
index 46007ca..0000000
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/AbstractDataSetAnalytic.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.graph.asm.dataset;
-
-import org.apache.flink.api.java.DataSet;
-import org.apache.flink.api.java.ExecutionEnvironment;
-import org.apache.flink.util.Preconditions;
-
-/**
- * Base class for {@link DataSetAnalytic}.
- *
- * @param <T> element type
- * @param <R> the return type
- */
-public abstract class AbstractDataSetAnalytic<T, R>
-implements DataSetAnalytic<T, R> {
-
-	protected ExecutionEnvironment env;
-
-	@Override
-	public AbstractDataSetAnalytic<T, R> run(DataSet<T> input)
-			throws Exception {
-		env = input.getExecutionEnvironment();
-		return this;
-	}
-
-	@Override
-	public R execute()
-			throws Exception {
-		env.execute();
-		return getResult();
-	}
-
-	@Override
-	public R execute(String jobName)
-			throws Exception {
-		Preconditions.checkNotNull(jobName);
-
-		env.execute(jobName);
-		return getResult();
-	}
-}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/ChecksumHashCode.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/ChecksumHashCode.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/ChecksumHashCode.java
index 1f8fe99..35a8876 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/ChecksumHashCode.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/ChecksumHashCode.java
@@ -18,14 +18,15 @@
 
 package org.apache.flink.graph.asm.dataset;
 
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.flink.api.common.accumulators.Accumulator;
 import org.apache.flink.api.common.accumulators.SimpleAccumulator;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.graph.AnalyticHelper;
 import org.apache.flink.graph.asm.dataset.ChecksumHashCode.Checksum;
 
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+
 import java.io.IOException;
 
 /**
@@ -35,7 +36,7 @@ import java.io.IOException;
  * @param <T> element type
  */
 public class ChecksumHashCode<T>
-extends AbstractDataSetAnalytic<T, Checksum> {
+extends DataSetAnalyticBase<T, Checksum> {
 
 	private static final String CHECKSUM = "checksum";
 
@@ -136,11 +137,19 @@ extends AbstractDataSetAnalytic<T, Checksum> {
 
 		@Override
 		public boolean equals(Object obj) {
-			if (obj == null) { return false; }
-			if (obj == this) { return true; }
-			if (obj.getClass() != getClass()) { return false; }
+			if (obj == null) {
+				return false;
+			}
+
+			if (obj == this) {
+				return true;
+			}
+
+			if (obj.getClass() != getClass()) {
+				return false;
+			}
 
-			Checksum rhs = (Checksum)obj;
+			Checksum rhs = (Checksum) obj;
 
 			return new EqualsBuilder()
 				.append(count, rhs.count)

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/Collect.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/Collect.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/Collect.java
index 771a044..ad2886f 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/Collect.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/Collect.java
@@ -34,7 +34,7 @@ import java.util.List;
  * @param <T> element type
  */
 public class Collect<T>
-extends AbstractDataSetAnalytic<T, List<T>> {
+extends DataSetAnalyticBase<T, List<T>> {
 
 	private static final String COLLECT = "collect";
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/Count.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/Count.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/Count.java
index 7bc97d5..34ef979 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/Count.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/Count.java
@@ -30,7 +30,7 @@ import java.io.IOException;
  * @param <T> element type
  */
 public class Count<T>
-extends AbstractDataSetAnalytic<T, Long> {
+extends DataSetAnalyticBase<T, Long> {
 
 	private static final String COUNT = "count";
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/DataSetAnalytic.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/DataSetAnalytic.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/DataSetAnalytic.java
index abf4039..9c5c448 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/DataSetAnalytic.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/DataSetAnalytic.java
@@ -35,7 +35,7 @@ import org.apache.flink.api.java.operators.CustomUnaryOperation;
 public interface DataSetAnalytic<T, R> {
 
 	/**
-	 * This method must be called after the program has executed:
+	 * This method must be called after the program has executed.
 	 *  1) "run" analytics and algorithms
 	 *  2) call ExecutionEnvironment.execute()
 	 *  3) get analytic results

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/DataSetAnalyticBase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/DataSetAnalyticBase.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/DataSetAnalyticBase.java
new file mode 100644
index 0000000..a1df14c
--- /dev/null
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/dataset/DataSetAnalyticBase.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.graph.asm.dataset;
+
+import org.apache.flink.api.java.DataSet;
+import org.apache.flink.api.java.ExecutionEnvironment;
+import org.apache.flink.util.Preconditions;
+
+/**
+ * Base class for {@link DataSetAnalytic}.
+ *
+ * @param <T> element type
+ * @param <R> the return type
+ */
+public abstract class DataSetAnalyticBase<T, R>
+implements DataSetAnalytic<T, R> {
+
+	protected ExecutionEnvironment env;
+
+	@Override
+	public DataSetAnalyticBase<T, R> run(DataSet<T> input)
+			throws Exception {
+		env = input.getExecutionEnvironment();
+		return this;
+	}
+
+	@Override
+	public R execute()
+			throws Exception {
+		env.execute();
+		return getResult();
+	}
+
+	@Override
+	public R execute(String jobName)
+			throws Exception {
+		Preconditions.checkNotNull(jobName);
+
+		env.execute(jobName);
+		return getResult();
+	}
+}

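To see what the new base class buys its subclasses, here is a hypothetical
analytic; ElementCount and its body are illustrative, not part of this
commit. run() chains to super to capture the ExecutionEnvironment, and the
inherited execute() then runs the job and hands back getResult():

    import org.apache.flink.api.java.DataSet;

    // Sketch only: a real analytic would attach an accumulator-backed
    // output to the input DataSet in run() and read it in getResult().
    public class ElementCount<T> extends DataSetAnalyticBase<T, Long> {

    	private long count;

    	@Override
    	public ElementCount<T> run(DataSet<T> input) throws Exception {
    		super.run(input);  // records input.getExecutionEnvironment()
    		return this;
    	}

    	@Override
    	public Long getResult() {
    		return count;
    	}
    }

With this in place, new ElementCount<String>().run(words).execute() submits
the job and returns the result in a single call.
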
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/DegreeAnnotationFunctions.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/DegreeAnnotationFunctions.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/DegreeAnnotationFunctions.java
index b91b4cb..a4791e9 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/DegreeAnnotationFunctions.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/DegreeAnnotationFunctions.java
@@ -30,8 +30,13 @@ import org.apache.flink.graph.Edge;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.types.LongValue;
 
+/**
+ * Common user-defined-functions.
+ */
 public class DegreeAnnotationFunctions {
 
+	private DegreeAnnotationFunctions() {}
+
 	// --------------------------------------------------------------------------------------------
 	//  Vertex functions
 	// --------------------------------------------------------------------------------------------

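The private constructor added above implements the checkstyle rule that a
class containing only static members must not be instantiable. The general
shape, with SomeUtils as a made-up example:

    public class SomeUtils {

    	private SomeUtils() {}  // prevents instantiation

    	public static long square(long x) {
    		return x * x;
    	}
    }
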
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeDegreesPair.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeDegreesPair.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeDegreesPair.java
index 6f808f3..27c829b 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeDegreesPair.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeDegreesPair.java
@@ -67,7 +67,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Edge<K, Tuple3<EV, Degrees, Deg
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingDataSet other) {
 		Preconditions.checkNotNull(other);
 
-		if (! EdgeDegreesPair.class.isAssignableFrom(other.getClass())) {
+		if (!EdgeDegreesPair.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeSourceDegrees.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeSourceDegrees.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeSourceDegrees.java
index 03fd1ba..3c4e611 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeSourceDegrees.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeSourceDegrees.java
@@ -66,7 +66,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Edge<K, Tuple2<EV, Degrees>>> {
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingDataSet other) {
 		Preconditions.checkNotNull(other);
 
-		if (! EdgeSourceDegrees.class.isAssignableFrom(other.getClass())) {
+		if (!EdgeSourceDegrees.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeTargetDegrees.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeTargetDegrees.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeTargetDegrees.java
index 7526d00..94788e2 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeTargetDegrees.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/EdgeTargetDegrees.java
@@ -66,7 +66,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Edge<K, Tuple2<EV, Degrees>>> {
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingDataSet other) {
 		Preconditions.checkNotNull(other);
 
-		if (! EdgeTargetDegrees.class.isAssignableFrom(other.getClass())) {
+		if (!EdgeTargetDegrees.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexDegrees.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexDegrees.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexDegrees.java
index f73d37b..0333b8b 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexDegrees.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexDegrees.java
@@ -32,7 +32,7 @@ import org.apache.flink.graph.EdgeOrder;
 import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.graph.asm.degree.annotate.directed.VertexDegrees.Degrees;
-import org.apache.flink.graph.utils.Murmur3_32;
+import org.apache.flink.graph.utils.MurmurHash;
 import org.apache.flink.graph.utils.proxy.GraphAlgorithmWrappingDataSet;
 import org.apache.flink.graph.utils.proxy.OptionalBoolean;
 import org.apache.flink.types.ByteValue;
@@ -93,7 +93,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Vertex<K, Degrees>> {
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingDataSet other) {
 		Preconditions.checkNotNull(other);
 
-		if (! VertexDegrees.class.isAssignableFrom(other.getClass())) {
+		if (!VertexDegrees.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 
@@ -271,7 +271,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Vertex<K, Degrees>> {
 	extends Tuple3<LongValue, LongValue, LongValue> {
 		private static final int HASH_SEED = 0x3a12fc31;
 
-		private Murmur3_32 hasher = new Murmur3_32(HASH_SEED);
+		private MurmurHash hasher = new MurmurHash(HASH_SEED);
 
 		public Degrees() {
 			this(new LongValue(), new LongValue(), new LongValue());

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexInDegree.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexInDegree.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexInDegree.java
index 5fdd8f9..f316b9b 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexInDegree.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexInDegree.java
@@ -86,7 +86,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Vertex<K, LongValue>> {
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingDataSet other) {
 		Preconditions.checkNotNull(other);
 
-		if (! VertexInDegree.class.isAssignableFrom(other.getClass())) {
+		if (!VertexInDegree.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexOutDegree.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexOutDegree.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexOutDegree.java
index 8e3e9c6..b04391d 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexOutDegree.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/directed/VertexOutDegree.java
@@ -86,7 +86,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Vertex<K, LongValue>> {
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingDataSet other) {
 		Preconditions.checkNotNull(other);
 
-		if (! VertexOutDegree.class.isAssignableFrom(other.getClass())) {
+		if (!VertexOutDegree.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeDegreePair.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeDegreePair.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeDegreePair.java
index 71b4891..c6d0646 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeDegreePair.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeDegreePair.java
@@ -88,7 +88,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Edge<K, Tuple3<EV, LongValue, L
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingDataSet other) {
 		Preconditions.checkNotNull(other);
 
-		if (! EdgeSourceDegree.class.isAssignableFrom(other.getClass())) {
+		if (!EdgeSourceDegree.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeSourceDegree.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeSourceDegree.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeSourceDegree.java
index ee9a144..88bab5a 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeSourceDegree.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeSourceDegree.java
@@ -86,7 +86,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Edge<K, Tuple2<EV, LongValue>>>
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingDataSet other) {
 		Preconditions.checkNotNull(other);
 
-		if (! EdgeSourceDegree.class.isAssignableFrom(other.getClass())) {
+		if (!EdgeSourceDegree.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeTargetDegree.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeTargetDegree.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeTargetDegree.java
index 1255d86..21918c7 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeTargetDegree.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/EdgeTargetDegree.java
@@ -86,7 +86,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Edge<K, Tuple2<EV, LongValue>>>
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingDataSet other) {
 		Preconditions.checkNotNull(other);
 
-		if (! EdgeSourceDegree.class.isAssignableFrom(other.getClass())) {
+		if (!EdgeSourceDegree.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/VertexDegree.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/VertexDegree.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/VertexDegree.java
index b731548..9ea99a7 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/VertexDegree.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/annotate/undirected/VertexDegree.java
@@ -21,15 +21,15 @@ package org.apache.flink.graph.asm.degree.annotate.undirected;
 import org.apache.flink.api.common.functions.MapFunction;
 import org.apache.flink.api.common.operators.base.ReduceOperatorBase.CombineHint;
 import org.apache.flink.api.java.DataSet;
-import org.apache.flink.graph.utils.proxy.GraphAlgorithmWrappingDataSet;
 import org.apache.flink.graph.Edge;
 import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
-import org.apache.flink.graph.utils.proxy.OptionalBoolean;
 import org.apache.flink.graph.asm.degree.annotate.DegreeAnnotationFunctions.DegreeCount;
 import org.apache.flink.graph.asm.degree.annotate.DegreeAnnotationFunctions.JoinVertexWithVertexDegree;
 import org.apache.flink.graph.asm.degree.annotate.DegreeAnnotationFunctions.MapEdgeToSourceId;
 import org.apache.flink.graph.asm.degree.annotate.DegreeAnnotationFunctions.MapEdgeToTargetId;
+import org.apache.flink.graph.utils.proxy.GraphAlgorithmWrappingDataSet;
+import org.apache.flink.graph.utils.proxy.OptionalBoolean;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.util.Preconditions;
 
@@ -106,7 +106,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Vertex<K, LongValue>> {
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingDataSet other) {
 		Preconditions.checkNotNull(other);
 
-		if (! VertexDegree.class.isAssignableFrom(other.getClass())) {
+		if (!VertexDegree.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/filter/undirected/MaximumDegree.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/filter/undirected/MaximumDegree.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/filter/undirected/MaximumDegree.java
index e5eea61..ae1e5b6 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/filter/undirected/MaximumDegree.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/degree/filter/undirected/MaximumDegree.java
@@ -123,7 +123,7 @@ extends GraphAlgorithmWrappingGraph<K, VV, EV, K, VV, EV> {
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingGraph other) {
 		Preconditions.checkNotNull(other);
 
-		if (! MaximumDegree.class.isAssignableFrom(other.getClass())) {
+		if (!MaximumDegree.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/simple/directed/Simplify.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/simple/directed/Simplify.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/simple/directed/Simplify.java
index 15c8359..d978096 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/simple/directed/Simplify.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/simple/directed/Simplify.java
@@ -64,7 +64,7 @@ extends GraphAlgorithmWrappingGraph<K, VV, EV, K, VV, EV> {
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingGraph other) {
 		Preconditions.checkNotNull(other);
 
-		if (! Simplify.class.isAssignableFrom(other.getClass())) {
+		if (!Simplify.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/simple/undirected/Simplify.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/simple/undirected/Simplify.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/simple/undirected/Simplify.java
index a3c007e..617dce1 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/simple/undirected/Simplify.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/simple/undirected/Simplify.java
@@ -49,7 +49,7 @@ extends GraphAlgorithmWrappingGraph<K, VV, EV, K, VV, EV> {
 	 * Simplifies an undirected graph by adding reverse edges and removing
 	 * self-loops and duplicate edges.
 	 *
-	 * When clip-and-flip is set, edges where source < target are removed
+	 * <p>When clip-and-flip is set, edges where source < target are removed
 	 * before symmetrizing the graph.
 	 *
 	 * @param clipAndFlip method for generating simple graph
@@ -82,7 +82,7 @@ extends GraphAlgorithmWrappingGraph<K, VV, EV, K, VV, EV> {
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingGraph other) {
 		Preconditions.checkNotNull(other);
 
-		if (! Simplify.class.isAssignableFrom(other.getClass())) {
+		if (!Simplify.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 
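The <p> edit here, like the javadoc changes in the neighbouring files,
follows checkstyle's JavadocParagraph rule: each paragraph after the first
starts with <p> attached to its first text line, never as a bare <p> line.
In sketch form:

    /**
     * First paragraph describing the method.
     *
     * <p>Second paragraph: the tag sits inline with the text
     * rather than on a standalone line.
     *
     * @param clipAndFlip method for generating simple graph
     */
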

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/Translate.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/Translate.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/Translate.java
index f7cc601..9c4f88e 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/Translate.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/Translate.java
@@ -71,7 +71,7 @@ public class Translate {
 		Preconditions.checkNotNull(vertices);
 		Preconditions.checkNotNull(translator);
 
-		Class<Vertex<NEW, VV>> vertexClass = (Class<Vertex<NEW, VV>>)(Class<? extends Vertex>) Vertex.class;
+		Class<Vertex<NEW, VV>> vertexClass = (Class<Vertex<NEW, VV>>) (Class<? extends Vertex>) Vertex.class;
 		TypeInformation<OLD> oldType = ((TupleTypeInfo<Vertex<OLD, VV>>) vertices.getType()).getTypeAt(0);
 		TypeInformation<NEW> newType = TypeExtractor.getUnaryOperatorReturnType(translator, TranslateFunction.class, false, false, oldType, null, false);
 		TypeInformation<VV> vertexValueType = ((TupleTypeInfo<Vertex<OLD, VV>>) vertices.getType()).getTypeAt(1);
@@ -146,7 +146,7 @@ public class Translate {
 		Preconditions.checkNotNull(edges);
 		Preconditions.checkNotNull(translator);
 
-		Class<Edge<NEW, EV>> edgeClass = (Class<Edge<NEW, EV>>)(Class<? extends Edge>) Edge.class;
+		Class<Edge<NEW, EV>> edgeClass = (Class<Edge<NEW, EV>>) (Class<? extends Edge>) Edge.class;
 		TypeInformation<OLD> oldType = ((TupleTypeInfo<Edge<OLD, EV>>) edges.getType()).getTypeAt(0);
 		TypeInformation<NEW> newType = TypeExtractor.getUnaryOperatorReturnType(translator, TranslateFunction.class, false, false, oldType, null, false);
 		TypeInformation<EV> edgeValueType = ((TupleTypeInfo<Edge<OLD, EV>>) edges.getType()).getTypeAt(2);
@@ -222,7 +222,7 @@ public class Translate {
 		Preconditions.checkNotNull(vertices);
 		Preconditions.checkNotNull(translator);
 
-		Class<Vertex<K, NEW>> vertexClass = (Class<Vertex<K, NEW>>)(Class<? extends Vertex>) Vertex.class;
+		Class<Vertex<K, NEW>> vertexClass = (Class<Vertex<K, NEW>>) (Class<? extends Vertex>) Vertex.class;
 		TypeInformation<K> idType = ((TupleTypeInfo<Vertex<K, OLD>>) vertices.getType()).getTypeAt(0);
 		TypeInformation<OLD> oldType = ((TupleTypeInfo<Vertex<K, OLD>>) vertices.getType()).getTypeAt(1);
 		TypeInformation<NEW> newType = TypeExtractor.getUnaryOperatorReturnType(translator, TranslateFunction.class, false, false, oldType, null, false);
@@ -297,7 +297,7 @@ public class Translate {
 		Preconditions.checkNotNull(edges);
 		Preconditions.checkNotNull(translator);
 
-		Class<Edge<K, NEW>> edgeClass = (Class<Edge<K, NEW>>)(Class<? extends Edge>) Edge.class;
+		Class<Edge<K, NEW>> edgeClass = (Class<Edge<K, NEW>>) (Class<? extends Edge>) Edge.class;
 		TypeInformation<K> idType = ((TupleTypeInfo<Edge<K, OLD>>) edges.getType()).getTypeAt(0);
 		TypeInformation<OLD> oldType = ((TupleTypeInfo<Edge<K, OLD>>) edges.getType()).getTypeAt(2);
 		TypeInformation<NEW> newType = TypeExtractor.getUnaryOperatorReturnType(translator, TranslateFunction.class, false, false, oldType, null, false);

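The casts reformatted above rely on a standard idiom for obtaining a
parameterized Class token: erasure means Vertex.class has type
Class<Vertex>, so the code first widens to a wildcard type and then makes
one unchecked cast. A self-contained sketch of the same trick, using List
for illustration:

    import java.util.List;

    public class ClassTokenDemo {
    	@SuppressWarnings("unchecked")
    	public static void main(String[] args) {
    		// List.class has type Class<List>; Class<List<String>> cannot be
    		// written directly, so cast through the wildcard-typed class.
    		Class<List<String>> token =
    			(Class<List<String>>) (Class<? extends List>) List.class;
    		System.out.println(token.getName());  // prints java.util.List
    	}
    }
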
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/TranslateEdgeValues.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/TranslateEdgeValues.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/TranslateEdgeValues.java
index c8000e4..3956a81 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/TranslateEdgeValues.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/TranslateEdgeValues.java
@@ -79,7 +79,7 @@ extends GraphAlgorithmWrappingGraph<K, VV, OLD, K, VV, NEW> {
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingGraph other) {
 		Preconditions.checkNotNull(other);
 
-		if (! TranslateEdgeValues.class.isAssignableFrom(other.getClass())) {
+		if (!TranslateEdgeValues.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/TranslateFunction.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/TranslateFunction.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/TranslateFunction.java
index 7f495bf..fb1e3c1 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/TranslateFunction.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/TranslateFunction.java
@@ -26,8 +26,8 @@ import java.io.Serializable;
  * Base interface for Translate functions. Translate functions take elements and transform them,
 * element-wise. A Translate function always produces a single result element for each input element.
  * Typical applications are transcribing between data types or manipulating element values.
- * <p>
- * Translate functions are used within the Graph API and by translating GraphAlgorithms.
+ *
+ * <p>Translate functions are used within the Graph API and by translating GraphAlgorithms.
  *
  * @param <T> Type of the input elements.
  * @param <O> Type of the returned elements.

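For reference, implementing the interface amounts to one method,
translate(value, reuse), where reuse lets the runtime recycle output
objects between calls. A sketch (AddOne is illustrative, not part of this
commit):

    import org.apache.flink.graph.asm.translate.TranslateFunction;
    import org.apache.flink.types.LongValue;

    // Adds one to each value, reusing the output object when provided.
    public class AddOne implements TranslateFunction<LongValue, LongValue> {

    	@Override
    	public LongValue translate(LongValue value, LongValue reuse) throws Exception {
    		if (reuse == null) {
    			reuse = new LongValue();
    		}
    		reuse.setValue(value.getValue() + 1);
    		return reuse;
    	}
    }
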
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/TranslateGraphIds.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/TranslateGraphIds.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/TranslateGraphIds.java
index 58cb7e2..d8c5676 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/TranslateGraphIds.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/TranslateGraphIds.java
@@ -30,7 +30,7 @@ import static org.apache.flink.graph.asm.translate.Translate.translateEdgeIds;
 import static org.apache.flink.graph.asm.translate.Translate.translateVertexIds;
 
 /**
- * Translate {@link Vertex} and {@link Edge} IDs of a {@link Graph} using the given {@link TranslateFunction}
+ * Translate {@link Vertex} and {@link Edge} IDs of a {@link Graph} using the given {@link TranslateFunction}.
  *
  * @param <OLD> old graph ID type
  * @param <NEW> new graph ID type
@@ -47,7 +47,7 @@ extends GraphAlgorithmWrappingGraph<OLD, VV, EV, NEW, VV, EV> {
 	private int parallelism = PARALLELISM_DEFAULT;
 
 	/**
-	 * Translate {@link Vertex} and {@link Edge} IDs of a {@link Graph} using the given {@link TranslateFunction}
+	 * Translate {@link Vertex} and {@link Edge} IDs of a {@link Graph} using the given {@link TranslateFunction}.
 	 *
 	 * @param translator implements conversion from {@code OLD} to {@code NEW}
 	 */
@@ -81,7 +81,7 @@ extends GraphAlgorithmWrappingGraph<OLD, VV, EV, NEW, VV, EV> {
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingGraph other) {
 		Preconditions.checkNotNull(other);
 
-		if (! TranslateGraphIds.class.isAssignableFrom(other.getClass())) {
+		if (!TranslateGraphIds.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/TranslateVertexValues.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/TranslateVertexValues.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/TranslateVertexValues.java
index 7447e11..452cb26 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/TranslateVertexValues.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/TranslateVertexValues.java
@@ -79,7 +79,7 @@ extends GraphAlgorithmWrappingGraph<K, OLD, EV, K, NEW, EV> {
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingGraph other) {
 		Preconditions.checkNotNull(other);
 
-		if (! TranslateVertexValues.class.isAssignableFrom(other.getClass())) {
+		if (!TranslateVertexValues.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 


[09/15] flink git commit: [FLINK-6709] [gelly] Activate strict checkstyle for flink-gellies

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/translators/LongValueAddOffset.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/translators/LongValueAddOffset.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/translators/LongValueAddOffset.java
index d44ece4..d9a881a 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/translators/LongValueAddOffset.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/translators/LongValueAddOffset.java
@@ -32,7 +32,7 @@ implements TranslateFunction<LongValue, LongValue> {
 	/**
 	 * Translate {@link LongValue} by adding a constant offset value.
 	 *
-	 * The summation is *not* checked for overflow or underflow.
+	 * <p>The summation is *not* checked for overflow or underflow.
 	 *
 	 * @param offset value to be added to each element
 	 */

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/translators/LongValueToSignedIntValue.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/translators/LongValueToSignedIntValue.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/translators/LongValueToSignedIntValue.java
index 30a74df..ef66307 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/translators/LongValueToSignedIntValue.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/translators/LongValueToSignedIntValue.java
@@ -26,7 +26,7 @@ import org.apache.flink.util.MathUtils;
 /**
  * Translate {@link LongValue} to {@link IntValue}.
  *
- * Throws {@link RuntimeException} for integer overflow.
+ * <p>Throws {@link RuntimeException} for integer overflow.
  */
 public class LongValueToSignedIntValue
 implements TranslateFunction<LongValue, IntValue> {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/translators/LongValueToUnsignedIntValue.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/translators/LongValueToUnsignedIntValue.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/translators/LongValueToUnsignedIntValue.java
index 741bd62..d7896d3 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/translators/LongValueToUnsignedIntValue.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/asm/translate/translators/LongValueToUnsignedIntValue.java
@@ -25,7 +25,7 @@ import org.apache.flink.types.LongValue;
 /**
  * Translate {@link LongValue} to {@link IntValue}.
  *
- * Throws {@link RuntimeException} for integer overflow.
+ * <p>Throws {@link RuntimeException} for integer overflow.
  */
 public class LongValueToUnsignedIntValue
 implements TranslateFunction<LongValue, IntValue> {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/bipartite/BipartiteGraph.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/bipartite/BipartiteGraph.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/bipartite/BipartiteGraph.java
index b325103..029e2c4 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/bipartite/BipartiteGraph.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/bipartite/BipartiteGraph.java
@@ -118,9 +118,9 @@ public class BipartiteGraph<KT, KB, VVT, VVB, EV> {
 	 * in the new graph will exist only if the original bipartite graph contains a bottom vertex they are both
 	 * connected to.
 	 *
-	 * The simple projection performs a single join and returns edges containing the bipartite edge values.
+	 * <p>The simple projection performs a single join and returns edges containing the bipartite edge values.
 	 *
-	 * Note: KT must override .equals(). This requirement may be removed in a future release.
+	 * <p>Note: KT must override .equals(). This requirement may be removed in a future release.
 	 *
 	 * @return simple top projection of the bipartite graph
 	 */
@@ -162,9 +162,9 @@ public class BipartiteGraph<KT, KB, VVT, VVB, EV> {
 	 * vertices in the new graph will exist only if the original bipartite graph contains a top vertex they are both
 	 * connected to.
 	 *
-	 * The simple projection performs a single join and returns edges containing the bipartite edge values.
+	 * <p>The simple projection performs a single join and returns edges containing the bipartite edge values.
 	 *
-	 * Note: KB must override .equals(). This requirement may be removed in a future release.
+	 * <p>Note: KB must override .equals(). This requirement may be removed in a future release.
 	 *
 	 * @return simple bottom projection of the bipartite graph
 	 */
@@ -205,10 +205,10 @@ public class BipartiteGraph<KT, KB, VVT, VVB, EV> {
 	 * Convert a bipartite graph into a graph that contains only top vertices. An edge between two vertices in the new
 	 * graph will exist only if the original bipartite graph contains at least one bottom vertex they both connect to.
 	 *
-	 * The full projection performs three joins and returns edges containing the connecting vertex ID and value,
+	 * <p>The full projection performs three joins and returns edges containing the connecting vertex ID and value,
 	 * both top vertex values, and both bipartite edge values.
 	 *
-	 * Note: KT must override .equals(). This requirement may be removed in a future release.
+	 * <p>Note: KT must override .equals(). This requirement may be removed in a future release.
 	 *
 	 * @return full top projection of the bipartite graph
 	 */
@@ -271,10 +271,10 @@ public class BipartiteGraph<KT, KB, VVT, VVB, EV> {
 	 * Convert a bipartite graph into a graph that contains only bottom vertices. An edge between two vertices in the
 	 * new graph will exist only if the original bipartite graph contains at least one top vertex they both connect to.
 	 *
-	 * The full projection performs three joins and returns edges containing the connecting vertex ID and value,
+	 * <p>The full projection performs three joins and returns edges containing the connecting vertex ID and value,
 	 * both bottom vertex values, and both bipartite edge values.
 	 *
-	 * Note: KB must override .equals(). This requirement may be removed in a future release.
+	 * <p>Note: KB must override .equals(). This requirement may be removed in a future release.
 	 *
 	 * @return full bottom projection of the bipartite graph
 	 */

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/bipartite/Projection.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/bipartite/Projection.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/bipartite/Projection.java
index 95a9cf6..ffb3462 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/bipartite/Projection.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/bipartite/Projection.java
@@ -22,7 +22,7 @@ import org.apache.flink.api.java.tuple.Tuple6;
 import org.apache.flink.graph.Vertex;
 
 /**
- * The edge value of a full bipartite projection contains:
+ * The edge value of a full bipartite projection. This contains:
  * <ul>
  *     <li>the ID and vertex value of the connecting vertex</li>
  *     <li>the vertex value for the source and target vertex</li>

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/AbstractGraphGenerator.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/AbstractGraphGenerator.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/AbstractGraphGenerator.java
deleted file mode 100644
index 58266a5..0000000
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/AbstractGraphGenerator.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.graph.generator;
-
-import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
-
-public abstract class AbstractGraphGenerator<K, VV, EV>
-implements GraphGenerator<K, VV, EV> {
-
-	// Optional configuration
-	protected int parallelism = PARALLELISM_DEFAULT;
-
-	@Override
-	public GraphGenerator<K, VV, EV> setParallelism(int parallelism) {
-		this.parallelism = parallelism;
-
-		return this;
-	}
-}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/CirculantGraph.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/CirculantGraph.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/CirculantGraph.java
index 9569b74..1dc9e66 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/CirculantGraph.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/CirculantGraph.java
@@ -41,7 +41,7 @@ import java.util.List;
  * @see <a href="http://mathworld.wolfram.com/CirculantGraph.html">Circulant Graph at Wolfram MathWorld</a>
  */
 public class CirculantGraph
-extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
+extends GraphGeneratorBase<LongValue, NullValue, NullValue> {
 
 	public static final int MINIMUM_VERTEX_COUNT = 2;
 
@@ -178,7 +178,7 @@ extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
 		}
 
 		/**
-		 * Get the range offset
+		 * Get the range offset.
 		 *
 		 * @return the offset
 		 */
@@ -187,7 +187,7 @@ extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
 		}
 
 		/**
-		 * Get the range length
+		 * Get the range length.
 		 *
 		 * @return the length
 		 */
@@ -196,7 +196,7 @@ extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
 		}
 
 		/**
-		 * Get the offset of the last index in the range
+		 * Get the offset of the last index in the range.
 		 *
 		 * @return last offset
 		 */

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/CompleteGraph.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/CompleteGraph.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/CompleteGraph.java
index 9dabe56..bf7dedf 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/CompleteGraph.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/CompleteGraph.java
@@ -28,7 +28,7 @@ import org.apache.flink.util.Preconditions;
  * @see <a href="http://mathworld.wolfram.com/CompleteGraph.html">Complete Graph at Wolfram MathWorld</a>
  */
 public class CompleteGraph
-extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
+extends GraphGeneratorBase<LongValue, NullValue, NullValue> {
 
 	public static final int MINIMUM_VERTEX_COUNT = 2;
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/CycleGraph.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/CycleGraph.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/CycleGraph.java
index 2386fe8..5b61fa8 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/CycleGraph.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/CycleGraph.java
@@ -28,7 +28,7 @@ import org.apache.flink.util.Preconditions;
  * @see <a href="http://mathworld.wolfram.com/CycleGraph.html">Cycle Graph at Wolfram MathWorld</a>
  */
 public class CycleGraph
-extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
+extends GraphGeneratorBase<LongValue, NullValue, NullValue> {
 
 	public static final int MINIMUM_VERTEX_COUNT = 2;
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/EchoGraph.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/EchoGraph.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/EchoGraph.java
index c15cdca..d834df1 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/EchoGraph.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/EchoGraph.java
@@ -30,13 +30,13 @@ import org.apache.flink.util.Preconditions;
  * single range of offsets centered at {@code n/2}. A vertex is connected to
  * 'far' vertices, which connect to 'near' vertices, which connect to 'far'
  * vertices, ....
- * <p>
- * Every {@link Vertex} in the {@link EchoGraph} has the same degree.
+ *
+ * <p>Every {@link Vertex} in the {@link EchoGraph} has the same degree,
  * and vertices as far as possible are chosen to be linked.
  * {@link EchoGraph} is a specific case of {@link CirculantGraph}.
  */
 public class EchoGraph
-extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
+extends GraphGeneratorBase<LongValue, NullValue, NullValue> {
 
 	public static final int MINIMUM_VERTEX_COUNT = 2;
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/EmptyGraph.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/EmptyGraph.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/EmptyGraph.java
index 23e3a9c..466e2d3 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/EmptyGraph.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/EmptyGraph.java
@@ -37,7 +37,7 @@ import java.util.Collections;
  * @see <a href="http://mathworld.wolfram.com/EmptyGraph.html">Empty Graph at Wolfram MathWorld</a>
  */
 public class EmptyGraph
-extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
+extends GraphGeneratorBase<LongValue, NullValue, NullValue> {
 
 	public static final int MINIMUM_VERTEX_COUNT = 1;
 
@@ -71,7 +71,7 @@ extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
 			ValueTypeInfo.LONG_VALUE_TYPE_INFO, ValueTypeInfo.LONG_VALUE_TYPE_INFO, ValueTypeInfo.NULL_VALUE_TYPE_INFO);
 
 		DataSource<Edge<LongValue, NullValue>> edges = env
-			.fromCollection(Collections.<Edge<LongValue ,NullValue>>emptyList(), typeInformation)
+			.fromCollection(Collections.<Edge<LongValue, NullValue>>emptyList(), typeInformation)
 				.setParallelism(parallelism)
 				.name("Empty edge set");
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GraphGenerator.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GraphGenerator.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GraphGenerator.java
index f972d98..24737810 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GraphGenerator.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GraphGenerator.java
@@ -26,11 +26,11 @@ import org.apache.flink.graph.Graph;
  * - scale-free, generating the same graph regardless of parallelism
  * - thrifty, using as few operators as possible
  *
- * Graph generators should prefer to emit edges sorted by the source label.
+ * <p>Graph generators should prefer to emit edges sorted by the source label.
  *
- * @param <K> the key type for edge and vertex identifiers
- * @param <VV> the value type for vertices
- * @param <EV> the value type for edges
+ * @param <K> graph ID type
+ * @param <VV> vertex value type
+ * @param <EV> edge value type
  */
 public interface GraphGenerator<K, VV, EV> {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GraphGeneratorBase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GraphGeneratorBase.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GraphGeneratorBase.java
new file mode 100644
index 0000000..1c4d097
--- /dev/null
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GraphGeneratorBase.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.graph.generator;
+
+import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
+
+/**
+ * Base class for graph generators.
+ *
+ * @param <K> graph ID type
+ * @param <VV> vertex value type
+ * @param <EV> edge value type
+ */
+public abstract class GraphGeneratorBase<K, VV, EV>
+implements GraphGenerator<K, VV, EV> {
+
+	// Optional configuration
+	protected int parallelism = PARALLELISM_DEFAULT;
+
+	@Override
+	public GraphGenerator<K, VV, EV> setParallelism(int parallelism) {
+		this.parallelism = parallelism;
+
+		return this;
+	}
+}

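Since GraphGeneratorBase only renames AbstractGraphGenerator, generator
usage is unchanged: configure through the inherited setParallelism(), then
call generate(). A minimal sketch using CompleteGraph (vertex count chosen
arbitrarily; constructor as in this release):

    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.graph.Graph;
    import org.apache.flink.graph.generator.CompleteGraph;
    import org.apache.flink.types.LongValue;
    import org.apache.flink.types.NullValue;

    public class GeneratorExample {
    	public static void main(String[] args) throws Exception {
    		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    		// setParallelism() comes from GraphGeneratorBase; generate()
    		// builds the vertex and edge DataSets.
    		Graph<LongValue, NullValue, NullValue> graph = new CompleteGraph(env, 10)
    			.setParallelism(4)
    			.generate();

    		System.out.println(graph.numberOfVertices());
    	}
    }
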
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GraphGeneratorUtils.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GraphGeneratorUtils.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GraphGeneratorUtils.java
index 485394c..fca9d8b 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GraphGeneratorUtils.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GraphGeneratorUtils.java
@@ -32,8 +32,13 @@ import org.apache.flink.types.NullValue;
 import org.apache.flink.util.Collector;
 import org.apache.flink.util.LongValueSequenceIterator;
 
+/**
+ * Utilities for graph generators.
+ */
 public class GraphGeneratorUtils {
 
+	private GraphGeneratorUtils() {}
+
 	/**
 	 * Generates {@link Vertex Vertices} with sequential, numerical labels.
 	 *
@@ -43,7 +48,7 @@ public class GraphGeneratorUtils {
 	 * @return {@link DataSet} of sequentially labeled {@link Vertex Vertices}
 	 */
 	public static DataSet<Vertex<LongValue, NullValue>> vertexSequence(ExecutionEnvironment env, int parallelism, long vertexCount) {
-		LongValueSequenceIterator iterator = new LongValueSequenceIterator(0, vertexCount-1);
+		LongValueSequenceIterator iterator = new LongValueSequenceIterator(0, vertexCount - 1);
 
 		DataSource<LongValue> vertexLabels = env
 			.fromParallelCollection(iterator, LongValue.class)

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GridGraph.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GridGraph.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GridGraph.java
index 0570dd2..cae2bc4 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GridGraph.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/GridGraph.java
@@ -39,7 +39,7 @@ import java.util.List;
  * @see <a href="http://mathworld.wolfram.com/GridGraph.html">Grid Graph at Wolfram MathWorld</a>
  */
 public class GridGraph
-extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
+extends GraphGeneratorBase<LongValue, NullValue, NullValue> {
 
 	// Required to create the DataSource
 	private final ExecutionEnvironment env;

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/HypercubeGraph.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/HypercubeGraph.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/HypercubeGraph.java
index e4eac69..daaaf53 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/HypercubeGraph.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/HypercubeGraph.java
@@ -28,7 +28,7 @@ import org.apache.flink.util.Preconditions;
  * @see <a href="http://mathworld.wolfram.com/HypercubeGraph.html">Hypercube Graph at Wolfram MathWorld</a>
  */
 public class HypercubeGraph
-extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
+extends GraphGeneratorBase<LongValue, NullValue, NullValue> {
 
 	public static final int MINIMUM_DIMENSIONS = 1;
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/PathGraph.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/PathGraph.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/PathGraph.java
index 5c4343b..e61fcd8 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/PathGraph.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/PathGraph.java
@@ -28,7 +28,7 @@ import org.apache.flink.util.Preconditions;
  * @see <a href="http://mathworld.wolfram.com/PathGraph.html">Path Graph at Wolfram MathWorld</a>
  */
 public class PathGraph
-extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
+extends GraphGeneratorBase<LongValue, NullValue, NullValue> {
 
 	public static final int MINIMUM_VERTEX_COUNT = 2;
 
@@ -42,8 +42,8 @@ extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
 	 * An undirected {@link Graph} with {@code n} vertices where each vertex
 	 * v<sub>i</sub> connects to adjacent vertices v<sub>i+1</sub> when
 	 * {@code i < n-1} and v<sub>i-1</sub> when {@code i > 0}.
-	 * <p>
-	 * A {@code PathGraph} is distinguished from a {@code CycleGraph} in that
+	 *
+	 * <p>A {@code PathGraph} is distinguished from a {@code CycleGraph} in that
 	 * the first and last vertex are not connected, breaking the cycle.
 	 *
 	 * @param env the Flink execution environment

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/RMatGraph.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/RMatGraph.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/RMatGraph.java
index 071b415..d14d32c 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/RMatGraph.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/RMatGraph.java
@@ -18,7 +18,6 @@
 
 package org.apache.flink.graph.generator;
 
-import org.apache.commons.math3.random.RandomGenerator;
 import org.apache.flink.api.common.functions.FlatMapFunction;
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.ExecutionEnvironment;
@@ -32,13 +31,15 @@ import org.apache.flink.types.NullValue;
 import org.apache.flink.util.Collector;
 import org.apache.flink.util.Preconditions;
 
+import org.apache.commons.math3.random.RandomGenerator;
+
 import java.util.List;
 
 /**
  * @see <a href="http://www.cs.cmu.edu/~christos/PUBLICATIONS/siam04.pdf">R-MAT: A Recursive Model for Graph Mining</a>
  */
 public class RMatGraph<T extends RandomGenerator>
-extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
+extends GraphGeneratorBase<LongValue, NullValue, NullValue> {
 
 	public static final int MINIMUM_VERTEX_COUNT = 1;
 
@@ -64,15 +65,15 @@ extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
 	private final long edgeCount;
 
 	// Optional configuration
-	public float A = DEFAULT_A;
+	private float a = DEFAULT_A;
 
-	public float B = DEFAULT_B;
+	private float b = DEFAULT_B;
 
-	public float C = DEFAULT_C;
+	private float c = DEFAULT_C;
 
 	private boolean noiseEnabled = false;
 
-	public float noise = DEFAULT_NOISE;
+	private float noise = DEFAULT_NOISE;
 
 	/**
 	 * A directed power-law multi{@link Graph graph} generated using the
@@ -99,22 +100,22 @@ extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
 	/**
 	 * The parameters for recursively subdividing the adjacency matrix.
 	 *
-	 * Setting A = B = C = 0.25 emulates the Erdős–Rényi model.
+	 * <p>Setting A = B = C = 0.25 emulates the Erdős–Rényi model.
 	 *
-	 * Graph500 uses A = 0.57, B = C = 0.19.
+	 * <p>Graph500 uses A = 0.57, B = C = 0.19.
 	 *
-	 * @param A likelihood of source bit = 0, target bit = 0
-	 * @param B likelihood of source bit = 0, target bit = 1
-	 * @param C likelihood of source bit = 1, target bit = 0
+	 * @param a likelihood of source bit = 0, target bit = 0
+	 * @param b likelihood of source bit = 0, target bit = 1
+	 * @param c likelihood of source bit = 1, target bit = 0
 	 * @return this
 	 */
-	public RMatGraph<T> setConstants(float A, float B, float C) {
-		Preconditions.checkArgument(A >= 0.0f && B >= 0.0f && C >= 0.0f && A + B + C <= 1.0f,
+	public RMatGraph<T> setConstants(float a, float b, float c) {
+		Preconditions.checkArgument(a >= 0.0f && b >= 0.0f && c >= 0.0f && a + b + c <= 1.0f,
 			"RMat parameters A, B, and C must be non-negative and sum to less than or equal to one");
 
-		this.A = A;
-		this.B = B;
-		this.C = C;
+		this.a = a;
+		this.b = b;
+		this.c = c;
 
 		return this;
 	}
@@ -155,7 +156,7 @@ extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
 			.rebalance()
 				.setParallelism(parallelism)
 				.name("Rebalance")
-			.flatMap(new GenerateEdges<T>(vertexCount, scale, A, B, C, noiseEnabled, noise))
+			.flatMap(new GenerateEdges<T>(vertexCount, scale, a, b, c, noiseEnabled, noise))
 				.setParallelism(parallelism)
 				.name("RMat graph edges");
 
@@ -174,13 +175,13 @@ extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
 
 		private final int scale;
 
-		private final float A;
+		private final float a;
 
-		private final float B;
+		private final float b;
 
-		private final float C;
+		private final float c;
 
-		private final float D;
+		private final float d;
 
 		private final boolean noiseEnabled;
 
@@ -195,13 +196,13 @@ extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
 
 		private Edge<LongValue, NullValue> targetToSource = new Edge<>(target, source, NullValue.getInstance());
 
-		public GenerateEdges(long vertexCount, int scale, float A, float B, float C, boolean noiseEnabled, float noise) {
+		public GenerateEdges(long vertexCount, int scale, float a, float b, float c, boolean noiseEnabled, float noise) {
 			this.vertexCount = vertexCount;
 			this.scale = scale;
-			this.A = A;
-			this.B = B;
-			this.C = C;
-			this.D = 1.0f - A - B - C;
+			this.a = a;
+			this.b = b;
+			this.c = c;
+			this.d = 1.0f - a - b - c;
 			this.noiseEnabled = noiseEnabled;
 			this.noise = noise;
 		}
@@ -217,10 +218,10 @@ extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
 				long y = 0;
 
 				// matrix constants are reset for each edge
-				float a = A;
-				float b = B;
-				float c = C;
-				float d = D;
+				float a = this.a;
+				float b = this.b;
+				float c = this.c;
+				float d = this.d;
 
 				for (int bit = 0; bit < scale; bit++) {
 					// generated next bit for source and target

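The hunk above ends inside the per-edge loop, which picks one quadrant of the adjacency matrix per bit of the vertex ID. A self-contained sketch of that selection, using plain java.util.Random in place of the commons-math RandomGenerator wired in above, and omitting the optional noise perturbation:

import java.util.Random;

public class RMatSketch {

	// Draws one edge for a graph of 2^scale vertices using the quadrant
	// probabilities a, b, c and d = 1 - a - b - c described above.
	static long[] randomEdge(Random rnd, int scale, float a, float b, float c) {
		long x = 0;
		long y = 0;

		for (int bit = 0; bit < scale; bit++) {
			x <<= 1;
			y <<= 1;

			float r = rnd.nextFloat();

			if (r <= a) {
				// source bit = 0, target bit = 0
			} else if (r <= a + b) {
				y |= 1; // source bit = 0, target bit = 1
			} else if (r <= a + b + c) {
				x |= 1; // source bit = 1, target bit = 0
			} else {
				x |= 1; // source bit = 1, target bit = 1
				y |= 1;
			}
		}

		return new long[] { x, y };
	}

	public static void main(String[] args) {
		// Graph500 constants, taken from the setConstants() javadoc above.
		long[] edge = randomEdge(new Random(42), 4, 0.57f, 0.19f, 0.19f);
		System.out.println(edge[0] + " -> " + edge[1]);
	}
}
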
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/SingletonEdgeGraph.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/SingletonEdgeGraph.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/SingletonEdgeGraph.java
index 125501c..159e55d 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/SingletonEdgeGraph.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/SingletonEdgeGraph.java
@@ -34,7 +34,7 @@ import org.apache.flink.util.Preconditions;
  * A singleton-edge {@link Graph} contains one or more isolated two-paths.
  */
 public class SingletonEdgeGraph
-extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
+extends GraphGeneratorBase<LongValue, NullValue, NullValue> {
 
 	public static final int MINIMUM_VERTEX_PAIR_COUNT = 1;
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/StarGraph.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/StarGraph.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/StarGraph.java
index f0f4e5a..7133320 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/StarGraph.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/StarGraph.java
@@ -35,7 +35,7 @@ import org.apache.flink.util.Preconditions;
  * @see <a href="http://mathworld.wolfram.com/StarGraph.html">Star Graph at Wolfram MathWorld</a>
  */
 public class StarGraph
-extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
+extends GraphGeneratorBase<LongValue, NullValue, NullValue> {
 
 	public static final int MINIMUM_VERTEX_COUNT = 2;
 
@@ -87,18 +87,18 @@ extends AbstractGraphGenerator<LongValue, NullValue, NullValue> {
 
 		private LongValue center = new LongValue(0);
 
-		private Edge<LongValue, NullValue> center_to_leaf = new Edge<>(center, null, NullValue.getInstance());
+		private Edge<LongValue, NullValue> centerToLeaf = new Edge<>(center, null, NullValue.getInstance());
 
-		private Edge<LongValue, NullValue> leaf_to_center = new Edge<>(null, center, NullValue.getInstance());
+		private Edge<LongValue, NullValue> leafToCenter = new Edge<>(null, center, NullValue.getInstance());
 
 		@Override
 		public void flatMap(LongValue leaf, Collector<Edge<LongValue, NullValue>> out)
 				throws Exception {
-			center_to_leaf.f1 = leaf;
-			out.collect(center_to_leaf);
+			centerToLeaf.f1 = leaf;
+			out.collect(centerToLeaf);
 
-			leaf_to_center.f0 = leaf;
-			out.collect(leaf_to_center);
+			leafToCenter.f0 = leaf;
+			out.collect(leafToCenter);
 		}
 	}
 }

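Beyond the naming fix, the two Edge fields above illustrate a deliberate pattern: both edges are allocated once per function instance and mutated per record, so emitting two edges per leaf creates no per-record garbage. The same pattern with plain tuples, as a standalone sketch (safe here because Flink serializes or copies records before the holders are mutated again):

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

public class TwoWayEmitter implements FlatMapFunction<Long, Tuple2<Long, Long>> {

	// Reused holders, mutated for every input record.
	private final Tuple2<Long, Long> centerToLeaf = new Tuple2<>(0L, 0L);
	private final Tuple2<Long, Long> leafToCenter = new Tuple2<>(0L, 0L);

	@Override
	public void flatMap(Long leaf, Collector<Tuple2<Long, Long>> out) {
		centerToLeaf.f1 = leaf;
		out.collect(centerToLeaf);

		leafToCenter.f0 = leaf;
		out.collect(leafToCenter);
	}
}
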
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/AbstractGeneratorFactory.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/AbstractGeneratorFactory.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/AbstractGeneratorFactory.java
deleted file mode 100644
index fc9e1ba..0000000
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/AbstractGeneratorFactory.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.graph.generator.random;
-
-import org.apache.commons.math3.random.RandomGenerator;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * This base class handles the task of dividing the requested work into the
- * appropriate number of blocks of near-equal size.
- *
- * @param <T> the type of the {@code RandomGenerator}
- */
-public abstract class AbstractGeneratorFactory<T extends RandomGenerator>
-implements RandomGenerableFactory<T> {
-
-	// A large computation will run in parallel but blocks are generated on
-	// and distributed from a single node. This limit should be greater
-	// than the maximum expected parallelism.
-	public static final int MAXIMUM_BLOCK_COUNT = 1 << 15;
-
-	// This should be sufficiently large relative to the cost of instantiating
-	// and initializing the random generator and sufficiently small relative to
-	// the cost of generating random values.
-	protected abstract int getMinimumCyclesPerBlock();
-
-	protected abstract RandomGenerable<T> next();
-
-	@Override
-	public List<BlockInfo<T>> getRandomGenerables(long elementCount, int cyclesPerElement) {
-		long cycles = elementCount * cyclesPerElement;
-		int blockCount = Math.min((int) Math.ceil(cycles / (float) getMinimumCyclesPerBlock()), MAXIMUM_BLOCK_COUNT);
-
-		long elementsPerBlock = elementCount / blockCount;
-		long elementRemainder = elementCount % blockCount;
-
-		List<BlockInfo<T>> blocks = new ArrayList<>(blockCount);
-		long blockStart = 0;
-
-		for (int blockIndex = 0 ; blockIndex < blockCount ; blockIndex++) {
-			if (blockIndex == blockCount - elementRemainder) {
-				elementsPerBlock++;
-			}
-
-			RandomGenerable<T> randomGenerable = next();
-
-			blocks.add(new BlockInfo<>(randomGenerable, blockIndex, blockCount, blockStart, elementsPerBlock));
-
-			blockStart += elementsPerBlock;
-		}
-
-		return blocks;
-	}
-}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/GeneratorFactoryBase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/GeneratorFactoryBase.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/GeneratorFactoryBase.java
new file mode 100644
index 0000000..d3147aa
--- /dev/null
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/GeneratorFactoryBase.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.graph.generator.random;
+
+import org.apache.commons.math3.random.RandomGenerator;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * This base class handles the task of dividing the requested work into the
+ * appropriate number of blocks of near-equal size.
+ *
+ * @param <T> the type of the {@code RandomGenerator}
+ */
+public abstract class GeneratorFactoryBase<T extends RandomGenerator>
+implements RandomGenerableFactory<T> {
+
+	// A large computation will run in parallel but blocks are generated on
+	// and distributed from a single node. This limit should be greater
+	// than the maximum expected parallelism.
+	public static final int MAXIMUM_BLOCK_COUNT = 1 << 15;
+
+	// This should be sufficiently large relative to the cost of instantiating
+	// and initializing the random generator and sufficiently small relative to
+	// the cost of generating random values.
+	protected abstract int getMinimumCyclesPerBlock();
+
+	protected abstract RandomGenerable<T> next();
+
+	@Override
+	public List<BlockInfo<T>> getRandomGenerables(long elementCount, int cyclesPerElement) {
+		long cycles = elementCount * cyclesPerElement;
+		int blockCount = Math.min((int) Math.ceil(cycles / (float) getMinimumCyclesPerBlock()), MAXIMUM_BLOCK_COUNT);
+
+		long elementsPerBlock = elementCount / blockCount;
+		long elementRemainder = elementCount % blockCount;
+
+		List<BlockInfo<T>> blocks = new ArrayList<>(blockCount);
+		long blockStart = 0;
+
+		for (int blockIndex = 0; blockIndex < blockCount; blockIndex++) {
+			if (blockIndex == blockCount - elementRemainder) {
+				elementsPerBlock++;
+			}
+
+			RandomGenerable<T> randomGenerable = next();
+
+			blocks.add(new BlockInfo<>(randomGenerable, blockIndex, blockCount, blockStart, elementsPerBlock));
+
+			blockStart += elementsPerBlock;
+		}
+
+		return blocks;
+	}
+}

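A worked example of the sizing logic in getRandomGenerables() above: for 10 elements over 3 blocks, elementsPerBlock starts at 10 / 3 = 3 with remainder 1, and the if-branch widens the final remainder blocks by one element, so every element is covered exactly once:

public class BlockSizingExample {
	public static void main(String[] args) {
		long elementCount = 10;
		int blockCount = 3;

		long elementsPerBlock = elementCount / blockCount; // 3
		long elementRemainder = elementCount % blockCount; // 1
		long blockStart = 0;

		for (int blockIndex = 0; blockIndex < blockCount; blockIndex++) {
			if (blockIndex == blockCount - elementRemainder) {
				elementsPerBlock++; // the last block holds 4 elements
			}

			// prints (start, size) = (0, 3), (3, 3), (6, 4)
			System.out.println("start=" + blockStart + " size=" + elementsPerBlock);

			blockStart += elementsPerBlock;
		}
	}
}
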
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/JDKRandomGeneratorFactory.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/JDKRandomGeneratorFactory.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/JDKRandomGeneratorFactory.java
index 2024cae..d406150 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/JDKRandomGeneratorFactory.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/JDKRandomGeneratorFactory.java
@@ -25,7 +25,7 @@ import org.apache.commons.math3.random.JDKRandomGenerator;
  * distributed collection of {@link JDKRandomGenerator}.
  */
 public class JDKRandomGeneratorFactory
-extends AbstractGeneratorFactory<JDKRandomGenerator> {
+extends GeneratorFactoryBase<JDKRandomGenerator> {
 
 	public static final long DEFAULT_SEED = 0x4b6f7e18198de7a4L;
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/MersenneTwisterFactory.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/MersenneTwisterFactory.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/MersenneTwisterFactory.java
index 22a7b04..5dd5a31 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/MersenneTwisterFactory.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/MersenneTwisterFactory.java
@@ -25,7 +25,7 @@ import org.apache.commons.math3.random.MersenneTwister;
  * distributed collection of {@link MersenneTwister}.
  */
 public class MersenneTwisterFactory
-extends AbstractGeneratorFactory<MersenneTwister> {
+extends GeneratorFactoryBase<MersenneTwister> {
 
 	public static final long DEFAULT_SEED = 0x74c8cc8a58a9ceb9L;
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/RandomGenerable.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/RandomGenerable.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/RandomGenerable.java
index 318b508..122d479 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/RandomGenerable.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/RandomGenerable.java
@@ -25,7 +25,7 @@ import org.apache.commons.math3.random.RandomGenerator;
  * RandomGenerator. This allows pre-processing or discovery to be distributed
  * and performed in parallel by Flink tasks.
  *
- * A distributed PRNG is described by Matsumoto and Takuji in
+ * <p>A distributed PRNG is described by Matsumoto and Takuji in
  * "Dynamic Creation of Pseudorandom Number Generators".
  *
  * @param <T> the type of the {@code RandomGenerator}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/ApplyFunction.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/ApplyFunction.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/ApplyFunction.java
index 19d08a5..4222f8a 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/ApplyFunction.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/ApplyFunction.java
@@ -59,17 +59,17 @@ public abstract class ApplyFunction<K, VV, M> implements Serializable {
 	//---------------------------------------------------------------------------------------------
 
 	/**
-	 * This method is invoked once per superstep, after the {@link SumFunction} 
+	 * This method is invoked once per superstep, after the {@link SumFunction}
 	 * in a {@link GatherSumApplyIteration}.
 	 * It updates the Vertex values.
-	 * 
+	 *
 	 * @param newValue the value computed during the current superstep.
 	 * @param currentValue the current Vertex value.
 	 */
 	public abstract void apply(M newValue, VV currentValue);
 
 	/**
-	 * Sets the result for the apply function
+	 * Sets the result for the apply function.
 	 *
 	 * @param result the result of the apply phase
 	 */

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GSAConfiguration.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GSAConfiguration.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GSAConfiguration.java
index 079b4c7..72e18ae 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GSAConfiguration.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GSAConfiguration.java
@@ -32,19 +32,19 @@ import java.util.List;
  * the {@link org.apache.flink.graph.gsa.GatherFunction}, {@link org.apache.flink.graph.gsa.SumFunction} as well as
  * {@link org.apache.flink.graph.gsa.ApplyFunction}.
  *
- * The GSAConfiguration object is passed as an argument to
+ * <p>The GSAConfiguration object is passed as an argument to
  * {@link org.apache.flink.graph.Graph#runGatherSumApplyIteration(org.apache.flink.graph.gsa.GatherFunction,
  * org.apache.flink.graph.gsa.SumFunction, org.apache.flink.graph.gsa.ApplyFunction, int)}
  */
 public class GSAConfiguration extends IterationConfiguration {
 
-	/** the broadcast variables for the gather function **/
+	// the broadcast variables for the gather function
 	private List<Tuple2<String, DataSet<?>>> bcVarsGather = new ArrayList<>();
 
-	/** the broadcast variables for the sum function **/
+	// the broadcast variables for the sum function
 	private List<Tuple2<String, DataSet<?>>> bcVarsSum = new ArrayList<>();
 
-	/** the broadcast variables for the apply function **/
+	// the broadcast variables for the apply function
 	private List<Tuple2<String, DataSet<?>>> bcVarsApply = new ArrayList<>();
 
 	private EdgeDirection direction = EdgeDirection.OUT;
@@ -113,7 +113,7 @@ public class GSAConfiguration extends IterationConfiguration {
 
 	/**
 	 * Gets the direction from which the neighbors are to be selected
-	 * By default the neighbors who are target of the edges are selected
+	 * By default the neighbors who are target of the edges are selected.
 	 *
 	 * @return an EdgeDirection, which can be either IN, OUT or ALL.
 	 */
@@ -123,7 +123,7 @@ public class GSAConfiguration extends IterationConfiguration {
 
 	/**
 	 * Sets the direction in which neighbors are to be selected
-	 * By default the neighbors who are target of the edges are selected
+	 * By default the neighbors who are target of the edges are selected.
 	 *
 	 * @param direction - IN, OUT or ALL
 	 */

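A configuration sketch tying the pieces above together. getDirection() appears verbatim in the GatherSumApplyIteration hunk below; the setter and the per-phase broadcast adder names here are inferred from the fields above and should be checked against the class before use:

import org.apache.flink.api.java.DataSet;
import org.apache.flink.graph.EdgeDirection;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.gsa.ApplyFunction;
import org.apache.flink.graph.gsa.GSAConfiguration;
import org.apache.flink.graph.gsa.GatherFunction;
import org.apache.flink.graph.gsa.SumFunction;

public class GsaConfigSketch {

	static <K, VV, EV, M> Graph<K, VV, EV> runWithConfig(
			Graph<K, VV, EV> graph,
			GatherFunction<VV, EV, M> gather,
			SumFunction<VV, EV, M> sum,
			ApplyFunction<K, VV, M> apply,
			DataSet<?> weights,
			int maxIterations) {

		GSAConfiguration parameters = new GSAConfiguration();

		// Gather from in-neighbors instead of the default OUT direction.
		parameters.setDirection(EdgeDirection.IN);

		// Broadcast variable visible only to the gather phase.
		parameters.addBroadcastSetForGatherFunction("weights", weights);

		return graph.runGatherSumApplyIteration(gather, sum, apply, maxIterations, parameters);
	}
}
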
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GatherFunction.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GatherFunction.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GatherFunction.java
index 90db9da..76e4cee 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GatherFunction.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GatherFunction.java
@@ -27,10 +27,10 @@ import java.util.Collection;
 
 /**
  * The base class for the first step of a {@link GatherSumApplyIteration}.
- * 
+ *
  * @param <VV> the vertex value type
  * @param <EV> the edge value type
- * @param <M> the output type 
+ * @param <M> the output type
  */
 @SuppressWarnings("serial")
 public abstract class GatherFunction<VV, EV, M> implements Serializable {
@@ -57,11 +57,11 @@ public abstract class GatherFunction<VV, EV, M> implements Serializable {
 	//---------------------------------------------------------------------------------------------
 
 	/**
-	 * This method is invoked once per superstep, for each {@link Neighbor} of each Vertex 
+	 * This method is invoked once per superstep, for each {@link Neighbor} of each Vertex
 	 * in the beginning of each superstep in a {@link GatherSumApplyIteration}.
 	 * It needs to produce a partial value, which will be combined with other partial value
 	 * in the next phase of the iteration.
-	 *  
+	 *
 	 * @param neighbor the input Neighbor. It provides access to the source Vertex and the Edge objects.
 	 * @return a partial result to be combined in the Sum phase.
 	 */

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GatherSumApplyIteration.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GatherSumApplyIteration.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GatherSumApplyIteration.java
index 5c07a73..12d4977 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GatherSumApplyIteration.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/GatherSumApplyIteration.java
@@ -105,7 +105,7 @@ public class GatherSumApplyIteration<K, VV, EV, M> implements CustomUnaryOperati
 	}
 
 	/**
-	 * Computes the results of the gather-sum-apply iteration
+	 * Computes the results of the gather-sum-apply iteration.
 	 *
 	 * @return The resulting DataSet
 	 */
@@ -161,7 +161,7 @@ public class GatherSumApplyIteration<K, VV, EV, M> implements CustomUnaryOperati
 		}
 
 		// Prepare the neighbors
-		if(this.configuration != null) {
+		if (this.configuration != null) {
 			direction = this.configuration.getDirection();
 		}
 		DataSet<Tuple2<K, Neighbor<VV, EV>>> neighbors;
@@ -246,7 +246,7 @@ public class GatherSumApplyIteration<K, VV, EV, M> implements CustomUnaryOperati
 	}
 
 	/**
-	 * Creates a new gather-sum-apply iteration operator for graphs
+	 * Creates a new gather-sum-apply iteration operator for graphs.
 	 *
 	 * @param edges The edge DataSet
 	 *

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/Neighbor.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/Neighbor.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/Neighbor.java
index 4c970fb..c69c7f7 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/Neighbor.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/Neighbor.java
@@ -21,8 +21,9 @@ package org.apache.flink.graph.gsa;
 import org.apache.flink.api.java.tuple.Tuple2;
 
 /**
- * This class represents a {@code <sourceVertex, edge>} pair
- * This is a wrapper around {@code Tuple2<VV, EV>} for convenience in the GatherFunction
+ * This class represents a {@code <sourceVertex, edge>} pair. This is a wrapper
+ * around {@code Tuple2<VV, EV>} for convenience in the GatherFunction.
+ *
  * @param <VV> the vertex value type
  * @param <EV> the edge value type
  */

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/SumFunction.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/SumFunction.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/SumFunction.java
index e70af1f..0d60c87 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/SumFunction.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/gsa/SumFunction.java
@@ -56,11 +56,11 @@ public abstract class SumFunction<VV, EV, M> implements Serializable {
 
 	//---------------------------------------------------------------------------------------------
 	/**
-	 * This method is invoked once per superstep, after the {@link GatherFunction} 
+	 * This method is invoked once per superstep, after the {@link GatherFunction}
 	 * in a {@link GatherSumApplyIteration}.
 	 * It combines the partial values produced by {@link GatherFunction#gather(Neighbor)}
 	 * in pairs, until a single value has been computed.
-	 * 
+	 *
 	 * @param arg0 the first partial value.
 	 * @param arg1 the second partial value.
 	 * @return the combined value.

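Concretely, the three functions documented above cooperate as follows for single-source shortest paths, sketched after the GSASingleSourceShortestPaths hunk further down (the Neighbor accessor names are assumptions based on its Tuple2 layout):

import org.apache.flink.graph.gsa.ApplyFunction;
import org.apache.flink.graph.gsa.GatherFunction;
import org.apache.flink.graph.gsa.Neighbor;
import org.apache.flink.graph.gsa.SumFunction;

public class ShortestPathsFunctions {

	// Gather: each neighbor offers its distance plus the connecting edge weight.
	static class CalculateDistances extends GatherFunction<Double, Double, Double> {
		@Override
		public Double gather(Neighbor<Double, Double> neighbor) {
			return neighbor.getNeighborValue() + neighbor.getEdgeValue();
		}
	}

	// Sum: partial values are combined pairwise, keeping the minimum candidate.
	static class ChooseMinDistance extends SumFunction<Double, Double, Double> {
		@Override
		public Double sum(Double newValue, Double currentValue) {
			return Math.min(newValue, currentValue);
		}
	}

	// Apply: the vertex value is replaced only when the gathered minimum improves it.
	static class UpdateDistance extends ApplyFunction<Long, Double, Double> {
		@Override
		public void apply(Double newDistance, Double oldDistance) {
			if (newDistance < oldDistance) {
				setResult(newDistance);
			}
		}
	}
}
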
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/CommunityDetection.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/CommunityDetection.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/CommunityDetection.java
index 74fdd69..9846286 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/CommunityDetection.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/CommunityDetection.java
@@ -36,14 +36,14 @@ import java.util.TreeMap;
 /**
  * Community Detection Algorithm.
  *
- * The Vertex values of the input Graph provide the initial label assignments.
- * 
- * Initially, each vertex is assigned a tuple formed of its own initial value along with a score equal to 1.0.
+ * <p>The Vertex values of the input Graph provide the initial label assignments.
+ *
+ * <p>Initially, each vertex is assigned a tuple formed of its own initial value along with a score equal to 1.0.
  * The vertices propagate their labels and max scores in iterations, each time adopting the label with the
  * highest score from the list of received messages. The chosen label is afterwards re-scored using the fraction
  * delta/the superstep number. Delta is passed as a parameter and has 0.5 as a default value.
- * 
- * @param <K> the Vertex ID type 
+ *
+ * @param <K> the Vertex ID type
  *
  */
 public class CommunityDetection<K> implements GraphAlgorithm<K, Long, Double, Graph<K, Long, Double>> {
@@ -56,12 +56,12 @@ public class CommunityDetection<K> implements GraphAlgorithm<K, Long, Double, Gr
 	 * Creates a new Community Detection algorithm instance.
 	 * The algorithm converges when vertices no longer update their value
 	 * or when the maximum number of iterations is reached.
-	 * 
+	 *
 	 * @see <a href="http://arxiv.org/pdf/0808.2633.pdf">
 	 * Towards real-time community detection in large networks</a>
-	 * 
+	 *
 	 * @param maxIterations The maximum number of iterations to run.
-	 * @param delta The hop attenuation parameter. Its default value is 0.5.  
+	 * @param delta The hop attenuation parameter. Its default value is 0.5.
 	 */
 	public CommunityDetection(int maxIterations, double delta) {
 
@@ -84,13 +84,13 @@ public class CommunityDetection<K> implements GraphAlgorithm<K, Long, Double, Gr
 	}
 
 	@SuppressWarnings("serial")
-	public static final class LabelMessenger<K> extends ScatterFunction<K, Tuple2<Long, Double>,
+	private static final class LabelMessenger<K> extends ScatterFunction<K, Tuple2<Long, Double>,
 			Tuple2<Long, Double>, Double> {
 
 		@Override
 		public void sendMessages(Vertex<K, Tuple2<Long, Double>> vertex) throws Exception {
 
-			for(Edge<K, Double> edge : getEdges()) {
+			for (Edge<K, Double> edge : getEdges()) {
 				sendMessageTo(edge.getTarget(), new Tuple2<>(vertex.getValue().f0,
 					vertex.getValue().f1 * edge.getValue()));
 			}
@@ -98,7 +98,7 @@ public class CommunityDetection<K> implements GraphAlgorithm<K, Long, Double, Gr
 	}
 
 	@SuppressWarnings("serial")
-	public static final class VertexLabelUpdater<K> extends GatherFunction<
+	private static final class VertexLabelUpdater<K> extends GatherFunction<
 			K, Tuple2<Long, Double>, Tuple2<Long, Double>> {
 
 		private double delta;
@@ -142,7 +142,7 @@ public class CommunityDetection<K> implements GraphAlgorithm<K, Long, Double, Gr
 				}
 			}
 
-			if(receivedLabelsWithScores.size() > 0) {
+			if (receivedLabelsWithScores.size() > 0) {
 				// find the label with the highest score from the ones received
 				double maxScore = Double.MIN_VALUE;
 				long maxScoreLabel = vertex.getValue().f0;
@@ -169,7 +169,7 @@ public class CommunityDetection<K> implements GraphAlgorithm<K, Long, Double, Gr
 
 	@SuppressWarnings("serial")
 	@ForwardedFields("f0")
-	public static final class AddScoreToVertexValuesMapper<K> implements MapFunction<
+	private static final class AddScoreToVertexValuesMapper<K> implements MapFunction<
 		Vertex<K, Long>, Vertex<K, Tuple2<Long, Double>>> {
 
 		public Vertex<K, Tuple2<Long, Double>> map(Vertex<K, Long> vertex) {
@@ -178,7 +178,7 @@ public class CommunityDetection<K> implements GraphAlgorithm<K, Long, Double, Gr
 	}
 
 	@SuppressWarnings("serial")
-	public static final class RemoveScoreFromVertexValuesMapper<K> implements MapFunction<
+	private static final class RemoveScoreFromVertexValuesMapper<K> implements MapFunction<
 		Vertex<K, Tuple2<Long, Double>>, Long> {
 
 		@Override

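As a usage sketch (Graph#run is assumed here; per the javadoc above, the input vertex values carry the initial Long labels and the edge values the Double weights):

import org.apache.flink.graph.Graph;
import org.apache.flink.graph.library.CommunityDetection;

public class CommunityDetectionSketch {

	static Graph<Long, Long, Double> detect(Graph<Long, Long, Double> input) throws Exception {
		// At most 30 supersteps; 0.5 is the documented default hop attenuation.
		return input.run(new CommunityDetection<Long>(30, 0.5));
	}
}
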
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/ConnectedComponents.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/ConnectedComponents.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/ConnectedComponents.java
index 959b816..a3110ab 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/ConnectedComponents.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/ConnectedComponents.java
@@ -35,17 +35,17 @@ import org.apache.flink.types.NullValue;
 /**
  * A scatter-gather implementation of the Weakly Connected Components algorithm.
  *
- * This implementation uses a comparable vertex value as initial component
+ * <p>This implementation uses a comparable vertex value as initial component
  * identifier (ID). Vertices propagate their current value in each iteration.
  * Upon receiving component IDs from its neighbors, a vertex adopts a new
  * component ID if its value is lower than its current component ID.
  *
- * The algorithm converges when vertices no longer update their component ID
+ * <p>The algorithm converges when vertices no longer update their component ID
  * value or when the maximum number of iterations has been reached.
- * 
- * The result is a DataSet of vertices, where the vertex value corresponds to
+ *
+ * <p>The result is a DataSet of vertices, where the vertex value corresponds to
  * the assigned component ID.
- * 
+ *
  * @see GSAConnectedComponents
  */
 @SuppressWarnings("serial")
@@ -59,7 +59,7 @@ public class ConnectedComponents<K, VV extends Comparable<VV>, EV>
 	 * The algorithm computes weakly connected components
 	 * and converges when no vertex updates its component ID
 	 * or when the maximum number of iterations has been reached.
-	 * 
+	 *
 	 * @param maxIterations The maximum number of iterations to run.
 	 */
 	public ConnectedComponents(Integer maxIterations) {

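A usage sketch with Long component IDs (Graph#run and the DataSet-of-vertices result type are assumed from the javadoc above; the GSA variant below is invoked the same way):

import org.apache.flink.api.java.DataSet;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.Vertex;
import org.apache.flink.graph.library.ConnectedComponents;
import org.apache.flink.types.NullValue;

public class ConnectedComponentsSketch {

	// Vertex values double as the initial component IDs and must be Comparable.
	static DataSet<Vertex<Long, Long>> components(
			Graph<Long, Long, NullValue> input) throws Exception {
		return input.run(new ConnectedComponents<Long, Long, NullValue>(20));
	}
}
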
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/GSAConnectedComponents.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/GSAConnectedComponents.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/GSAConnectedComponents.java
index 1680f38..37e5cab 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/GSAConnectedComponents.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/GSAConnectedComponents.java
@@ -36,18 +36,18 @@ import org.apache.flink.types.NullValue;
 /**
  * A gather-sum-apply implementation of the Weakly Connected Components algorithm.
  *
- * This implementation uses a comparable vertex value as initial component
+ * <p>This implementation uses a comparable vertex value as initial component
  * identifier (ID). In the gather phase, each vertex collects the vertex value
  * of their adjacent vertices. In the sum phase, the minimum among those values
  * is selected. In the apply phase, the algorithm sets the minimum value as the
  * new vertex value if it is smaller than the current value.
  *
- * The algorithm converges when vertices no longer update their component ID
+ * <p>The algorithm converges when vertices no longer update their component ID
  * value or when the maximum number of iterations has been reached.
  *
- * The result is a DataSet of vertices, where the vertex value corresponds to
+ * <p>The result is a DataSet of vertices, where the vertex value corresponds to
  * the assigned component ID.
- * 
+ *
  * @see ConnectedComponents
  */
 public class GSAConnectedComponents<K, VV extends Comparable<VV>, EV>
@@ -60,7 +60,7 @@ public class GSAConnectedComponents<K, VV extends Comparable<VV>, EV>
 	 * The algorithm computes weakly connected components
 	 * and converges when no vertex updates its component ID
 	 * or when the maximum number of iterations has been reached.
-	 * 
+	 *
 	 * @param maxIterations The maximum number of iterations to run.
 	 */
 	public GSAConnectedComponents(Integer maxIterations) {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/GSASingleSourceShortestPaths.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/GSASingleSourceShortestPaths.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/GSASingleSourceShortestPaths.java
index 3f817f4..2d0b8da 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/GSASingleSourceShortestPaths.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/GSASingleSourceShortestPaths.java
@@ -29,7 +29,7 @@ import org.apache.flink.graph.gsa.Neighbor;
 import org.apache.flink.graph.gsa.SumFunction;
 
 /**
- * This is an implementation of the Single Source Shortest Paths algorithm, using a gather-sum-apply iteration
+ * This is an implementation of the Single Source Shortest Paths algorithm, using a gather-sum-apply iteration.
  */
 public class GSASingleSourceShortestPaths<K, VV> implements
 	GraphAlgorithm<K, VV, Double, DataSet<Vertex<K, Double>>> {
@@ -39,7 +39,7 @@ public class GSASingleSourceShortestPaths<K, VV> implements
 
 	/**
 	 * Creates an instance of the GSA SingleSourceShortestPaths algorithm.
-	 * 
+	 *
 	 * @param srcVertexId The ID of the source vertex.
 	 * @param maxIterations The maximum number of iterations to run.
 	 */
@@ -58,7 +58,7 @@ public class GSASingleSourceShortestPaths<K, VV> implements
 	}
 
 	@SuppressWarnings("serial")
-	public static final class InitVerticesMapper<K, VV> implements MapFunction<Vertex<K, VV>, Double> {
+	private static final class InitVerticesMapper<K, VV> implements MapFunction<Vertex<K, VV>, Double> {
 
 		private K srcVertexId;
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/LabelPropagation.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/LabelPropagation.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/LabelPropagation.java
index 0064a68..1e700f4 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/LabelPropagation.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/LabelPropagation.java
@@ -41,7 +41,7 @@ import java.util.Map.Entry;
  * detects communities by propagating labels. In each iteration, a vertex adopts
  * the label that is most frequent among its neighbors' labels.
  *
- * The initial vertex values are used as initial labels and are expected to be
+ * <p>The initial vertex values are used as initial labels and are expected to be
  * {@link Comparable}. In case of a tie (i.e. two or more labels appear with the
  * same frequency), the algorithm picks the greater label. The algorithm converges
  * when no vertex changes its value or the maximum number of iterations has been
@@ -61,10 +61,10 @@ public class LabelPropagation<K, VV extends Comparable<VV>, EV>
 	 * Creates a new Label Propagation algorithm instance.
 	 * The algorithm converges when vertices no longer update their value
 	 * or when the maximum number of iterations is reached.
-	 * 
+	 *
 	 * @see <a href="http://journals.aps.org/pre/abstract/10.1103/PhysRevE.76.036106">
 	 * Near linear time algorithm to detect community structures in large-scale networks</a>
-	 * 
+	 *
 	 * @param maxIterations The maximum number of iterations to run.
 	 */
 	public LabelPropagation(int maxIterations) {
@@ -84,7 +84,7 @@ public class LabelPropagation<K, VV extends Comparable<VV>, EV>
 	}
 
 	/**
-	 * Sends the vertex label to all out-neighbors
+	 * Sends the vertex label to all out-neighbors.
 	 */
 	public static final class SendNewLabelToNeighbors<K, VV extends Comparable<VV>>
 		extends ScatterFunction<K, VV, VV, NullValue>
@@ -108,7 +108,7 @@ public class LabelPropagation<K, VV extends Comparable<VV>, EV>
 
 	/**
 	 * Function that updates the value of a vertex by adopting the most frequent
-	 * label among its in-neighbors
+	 * label among its in-neighbors.
 	 */
 	public static final class UpdateVertexLabel<K, VV extends Comparable<VV>> extends GatherFunction<K, VV, VV> {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/SingleSourceShortestPaths.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/SingleSourceShortestPaths.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/SingleSourceShortestPaths.java
index 16b51bc..15f0a84 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/SingleSourceShortestPaths.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/SingleSourceShortestPaths.java
@@ -39,7 +39,7 @@ public class SingleSourceShortestPaths<K, VV> implements GraphAlgorithm<K, VV, D
 
 	/**
 	 * Creates an instance of the SingleSourceShortestPaths algorithm.
-	 * 
+	 *
 	 * @param srcVertexId The ID of the source vertex.
 	 * @param maxIterations The maximum number of iterations to run.
 	 */
@@ -56,7 +56,7 @@ public class SingleSourceShortestPaths<K, VV> implements GraphAlgorithm<K, VV, D
 				maxIterations).getVertices();
 	}
 
-	public static final class InitVerticesMapper<K, VV> implements MapFunction<Vertex<K, VV>, Double> {
+	private static final class InitVerticesMapper<K, VV> implements MapFunction<Vertex<K, VV>, Double> {
 
 		private K srcVertexId;
 
@@ -94,7 +94,7 @@ public class SingleSourceShortestPaths<K, VV> implements GraphAlgorithm<K, VV, D
 	/**
 	 * Function that updates the value of a vertex by picking the minimum
 	 * distance from all incoming messages.
-	 * 
+	 *
 	 * @param <K>
 	 */
 	public static final class VertexDistanceUpdater<K> extends GatherFunction<K, Double, Double> {

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/Summarization.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/Summarization.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/Summarization.java
index fed4d89..44ea988 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/Summarization.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/Summarization.java
@@ -37,49 +37,52 @@ import org.apache.flink.types.NullValue;
 import org.apache.flink.util.Collector;
 
 /**
- * The summarization algorithm computes a condensed version of the input graph<br>
- * by grouping vertices and edges based on their values. By doing this, the<br>
- * algorithm helps to uncover insights about patterns and distributions in the<br>
+ * The summarization algorithm computes a condensed version of the input graph
+ * by grouping vertices and edges based on their values. By doing this, the
+ * algorithm helps to uncover insights about patterns and distributions in the
  * graph.
- * <p>
- * In the resulting graph, each vertex represents a group of vertices that share the<br>
- * same vertex value. An edge, that connects a vertex with itself, represents all edges<br>
- * with the same edge value that connect vertices inside that group. An edge between<br>
- * vertices in the output graph represents all edges with the same edge value between<br>
+ *
+ * <p>In the resulting graph, each vertex represents a group of vertices that share the
+ * same vertex value. An edge, that connects a vertex with itself, represents all edges
+ * with the same edge value that connect vertices inside that group. An edge between
+ * vertices in the output graph represents all edges with the same edge value between
  * members of those groups in the input graph.
- * <p>
- * Consider the following example:
- * <p>
- * Input graph:
- * <p>
- * Vertices (id, value):<br>
- * (0, "A")<br>
- * (1, "A")<br>
- * (2, "B")<br>
- * (3, "B")<br>
- * <p>
+ *
+ * <p>Consider the following example:
+ *
+ * <p>Input graph:
+ *
+ * <pre>
+ * Vertices (id, value):
+ * (0, "A")
+ * (1, "A")
+ * (2, "B")
+ * (3, "B")
+ *
  * Edges (source, target, value):
- * (0,1, null)<br>
- * (1,0, null)<br>
- * (1,2, null)<br>
- * (2,1, null)<br>
- * (2,3, null)<br>
- * (3,2, null)<br>
- * <p>
- * Output graph:
- * <p>
- * Vertices (id, (value, count)):<br>
- * (0, ("A", 2)) // 0 and 1 <br>
- * (2, ("B", 2)) // 2 and 3 <br>
- * <p>
- * Edges (source, target, (value, count)):<br>
- * (0, 0, (null, 2)) // (0,1) and (1,0) <br>
- * (2, 2, (null, 2)) // (2,3) and (3,2) <br>
- * (0, 2, (null, 1)) // (1,2) <br>
- * (2, 0, (null, 1)) // (2,1) <br>
+ * (0,1, null)
+ * (1,0, null)
+ * (1,2, null)
+ * (2,1, null)
+ * (2,3, null)
+ * (3,2, null)
+ * </pre>
+ *
+ * <p>Output graph:
+ *
+ * <pre>Vertices (id, (value, count)):
+ * (0, ("A", 2)) // 0 and 1
+ * (2, ("B", 2)) // 2 and 3
+ *
+ * Edges (source, target, (value, count)):
+ * (0, 0, (null, 2)) // (0,1) and (1,0)
+ * (2, 2, (null, 2)) // (2,3) and (3,2)
+ * (0, 2, (null, 1)) // (1,2)
+ * (2, 0, (null, 1)) // (2,1)
+ * </pre>
  *
- * Note that this implementation is non-deterministic in the way that it assigns<br>
- * identifiers to summarized vertices. However, it is guaranteed that the identifier<br>
+ * <p>Note that this implementation is non-deterministic in the way that it assigns
+ * identifiers to summarized vertices. However, it is guaranteed that the identifier
  * is one of the represented vertex identifiers.
  *
  * @param <K> 	vertex identifier type
@@ -138,8 +141,10 @@ public class Summarization<K, VV, EV>
 	/**
 	 * Value that is stored at a summarized vertex.
 	 *
+	 * <pre>
 	 * f0: vertex group value
 	 * f1: vertex group count
+	 * </pre>
 	 *
 	 * @param <VV> vertex value type
 	 */
@@ -166,8 +171,10 @@ public class Summarization<K, VV, EV>
 	/**
 	 * Value that is stored at a summarized edge.
 	 *
+	 * <pre>
 	 * f0: edge group value
 	 * f1: edge group count
+	 * </pre>
 	 *
 	 * @param <EV> edge value type
 	 */
@@ -194,10 +201,12 @@ public class Summarization<K, VV, EV>
 	/**
 	 * Represents a single vertex in a vertex group.
 	 *
+	 * <pre>
 	 * f0: vertex identifier
 	 * f1: vertex group representative identifier
 	 * f2: vertex group value
 	 * f3: vertex group count
+	 * </pre>
 	 *
 	 * @param <K> 	vertex identifier type
 	 * @param <VGV> vertex group value type
@@ -288,7 +297,7 @@ public class Summarization<K, VV, EV>
 	 * Creates one {@link VertexGroupItem} for each group element containing the vertex identifier and the identifier
 	 * of the group representative which is the first vertex in the reduce input iterable.
 	 *
-	 * Creates one {@link VertexGroupItem} representing the whole group that contains the vertex identifier of the
+	 * <p>Creates one {@link VertexGroupItem} representing the whole group that contains the vertex identifier of the
 	 * group representative, the vertex group value and the total number of group elements.
 	 *
 	 * @param <K> 	vertex identifier type

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/TriangleEnumerator.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/TriangleEnumerator.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/TriangleEnumerator.java
index 6296618..2ae6120 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/TriangleEnumerator.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/TriangleEnumerator.java
@@ -39,24 +39,21 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
-
 /**
  * This library method enumerates unique triangles present in the input graph.
  * A triangle consists of three edges that connect three vertices with each other.
  * Edge directions are ignored here.
  * The method returns a DataSet of Tuple3, where the fields of each Tuple3 contain the Vertex IDs of a triangle.
- * <p>
- * <p>
- * The basic algorithm works as follows:
- * It groups all edges that share a common vertex and builds triads, i.e., triples of vertices
- * that are connected by two edges. Finally, all triads are filtered for which no third edge exists
- * that closes the triangle.
- * <p>
- * <p>
- * For a group of <i>n</i> edges that share a common vertex, the number of built triads is quadratic <i>((n*(n-1))/2)</i>.
+ *
+ * <p>The basic algorithm groups all edges that share a common vertex and builds triads,
+ * i.e., triples of vertices that are connected by two edges. Then all triads are filtered
+ * for which no third edge exists that closes the triangle.
+ *
+ * <p>For a group of <i>n</i> edges that share a common vertex, the number of built triads is quadratic <i>((n*(n-1))/2)</i>.
  * Therefore, an optimization of the algorithm is to group edges on the vertex with the smaller output degree to
  * reduce the number of triads.
- * This implementation extends the basic algorithm by computing output degrees of edge vertices and
+ *
+ * <p>This implementation extends the basic algorithm by computing output degrees of edge vertices and
  * grouping on edges on the vertex with the smaller degree.
  */
 public class TriangleEnumerator<K extends Comparable<K>, VV, EV> implements
@@ -276,6 +273,11 @@ public class TriangleEnumerator<K extends Comparable<K>, VV, EV> implements
 		}
 	}
 
+	/**
+	 * POJO storing two vertex IDs with degree.
+	 *
+	 * @param <K> vertex ID type
+	 */
 	@SuppressWarnings("serial")
 	public static final class EdgeWithDegrees<K> extends Tuple4<K, K, Integer, Integer> {
 
@@ -324,6 +326,11 @@ public class TriangleEnumerator<K extends Comparable<K>, VV, EV> implements
 		}
 	}
 
+	/**
+	 * Tuple storing three vertex IDs.
+	 *
+	 * @param <K> vertex ID type
+	 */
 	public static final class Triad<K> extends Tuple3<K, K, K> {
 		private static final long serialVersionUID = 1L;
 

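To make the quadratic cost in the rewritten javadoc concrete: a vertex shared by n = 5 edges produces 5 * 4 / 2 = 10 triads, while n = 100 already produces 4950. Grouping each edge on its lower-degree endpoint therefore bounds the triad count per group by the smaller of the two degrees, which is exactly what the degree-annotation step described above enables.
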
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/AverageClusteringCoefficient.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/AverageClusteringCoefficient.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/AverageClusteringCoefficient.java
index 46bed68..bfeb3d5 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/AverageClusteringCoefficient.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/AverageClusteringCoefficient.java
@@ -18,18 +18,19 @@
 
 package org.apache.flink.graph.library.clustering.directed;
 
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.flink.api.common.accumulators.DoubleCounter;
 import org.apache.flink.api.common.accumulators.LongCounter;
 import org.apache.flink.api.java.DataSet;
-import org.apache.flink.graph.AbstractGraphAnalytic;
 import org.apache.flink.graph.AnalyticHelper;
 import org.apache.flink.graph.Graph;
+import org.apache.flink.graph.GraphAnalyticBase;
 import org.apache.flink.graph.asm.result.PrintableResult;
 import org.apache.flink.graph.library.clustering.directed.AverageClusteringCoefficient.Result;
 import org.apache.flink.types.CopyableValue;
 
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+
 import java.io.IOException;
 
 import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
@@ -43,7 +44,7 @@ import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
  * @param <EV> edge value type
  */
 public class AverageClusteringCoefficient<K extends Comparable<K> & CopyableValue<K>, VV, EV>
-extends AbstractGraphAnalytic<K, VV, EV, Result> {
+extends GraphAnalyticBase<K, VV, EV, Result> {
 
 	private static final String VERTEX_COUNT = "vertexCount";
 
@@ -181,11 +182,19 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 
 		@Override
 		public boolean equals(Object obj) {
-			if (obj == null) { return false; }
-			if (obj == this) { return true; }
-			if (obj.getClass() != getClass()) { return false; }
+			if (obj == null) {
+				return false;
+			}
+
+			if (obj == this) {
+				return true;
+			}
+
+			if (obj.getClass() != getClass()) {
+				return false;
+			}
 
-			Result rhs = (Result)obj;
+			Result rhs = (Result) obj;
 
 			return new EqualsBuilder()
 				.append(vertexCount, rhs.vertexCount)

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/GlobalClusteringCoefficient.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/GlobalClusteringCoefficient.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/GlobalClusteringCoefficient.java
index 20a6ec5..03f06b1 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/GlobalClusteringCoefficient.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/GlobalClusteringCoefficient.java
@@ -18,17 +18,18 @@
 
 package org.apache.flink.graph.library.clustering.directed;
 
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.flink.api.java.DataSet;
-import org.apache.flink.graph.AbstractGraphAnalytic;
 import org.apache.flink.graph.Graph;
+import org.apache.flink.graph.GraphAnalyticBase;
 import org.apache.flink.graph.asm.dataset.Count;
 import org.apache.flink.graph.asm.result.PrintableResult;
 import org.apache.flink.graph.library.clustering.directed.GlobalClusteringCoefficient.Result;
 import org.apache.flink.graph.library.metric.directed.VertexMetrics;
 import org.apache.flink.types.CopyableValue;
 
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+
 import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
 
 /**
@@ -40,7 +41,7 @@ import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
  * @param <EV> edge value type
  */
 public class GlobalClusteringCoefficient<K extends Comparable<K> & CopyableValue<K>, VV, EV>
-extends AbstractGraphAnalytic<K, VV, EV, Result> {
+extends GraphAnalyticBase<K, VV, EV, Result> {
 
 	private Count<TriangleListing.Result<K>> triangleCount;
 
@@ -142,13 +143,13 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 		 * number of closed triplets (triangles) divided by the total number of
 		 * triplets.
 		 *
-		 * A score of {@code Double.NaN} is returned for a graph of isolated vertices
+		 * <p>A score of {@code Double.NaN} is returned for a graph of isolated vertices
 		 * for which both the triangle count and number of neighbors are zero.
 		 *
 		 * @return global clustering coefficient score
 		 */
 		public double getGlobalClusteringCoefficientScore() {
-			return (tripletCount == 0) ? Double.NaN : triangleCount / (double)tripletCount;
+			return (tripletCount == 0) ? Double.NaN : triangleCount / (double) tripletCount;
 		}
 
 		@Override
@@ -168,11 +169,19 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 
 		@Override
 		public boolean equals(Object obj) {
-			if (obj == null) { return false; }
-			if (obj == this) { return true; }
-			if (obj.getClass() != getClass()) { return false; }
+			if (obj == null) {
+				return false;
+			}
+
+			if (obj == this) {
+				return true;
+			}
+
+			if (obj.getClass() != getClass()) {
+				return false;
+			}
 
-			Result rhs = (Result)obj;
+			Result rhs = (Result) obj;
 
 			return new EqualsBuilder()
 				.append(tripletCount, rhs.tripletCount)

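As a worked example of getGlobalClusteringCoefficientScore() above: with triangleCount = 2 and tripletCount = 6 the score is 2 / 6 ≈ 0.33, and a graph of isolated vertices (both counts zero) returns Double.NaN rather than dividing by zero, as guarded in the method.
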
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/LocalClusteringCoefficient.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/LocalClusteringCoefficient.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/LocalClusteringCoefficient.java
index 92b14e3..b980244 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/LocalClusteringCoefficient.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/clustering/directed/LocalClusteringCoefficient.java
@@ -35,7 +35,7 @@ import org.apache.flink.graph.asm.degree.annotate.directed.VertexDegrees.Degrees
 import org.apache.flink.graph.asm.result.PrintableResult;
 import org.apache.flink.graph.asm.result.UnaryResult;
 import org.apache.flink.graph.library.clustering.directed.LocalClusteringCoefficient.Result;
-import org.apache.flink.graph.utils.Murmur3_32;
+import org.apache.flink.graph.utils.MurmurHash;
 import org.apache.flink.graph.utils.proxy.GraphAlgorithmWrappingDataSet;
 import org.apache.flink.graph.utils.proxy.OptionalBoolean;
 import org.apache.flink.types.CopyableValue;
@@ -49,12 +49,12 @@ import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
  * The local clustering coefficient measures the connectedness of each vertex's
  * neighborhood. Scores range from 0.0 (no edges between neighbors) to 1.0
  * (neighborhood is a clique).
- * <p>
- * An edge between a vertex's neighbors is a triangle. Counting edges between
+ *
+ * <p>An edge between a vertex's neighbors is a triangle. Counting edges between
  * neighbors is equivalent to counting the number of triangles which include
  * the vertex.
- * <p>
- * The input graph must be a simple graph containing no duplicate edges or
+ *
+ * <p>The input graph must be a simple graph containing no duplicate edges or
  * self-loops.
  *
  * @param <K> graph ID type
@@ -98,6 +98,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 
 		return this;
 	}
+
 	@Override
 	protected String getAlgorithmName() {
 		return LocalClusteringCoefficient.class.getName();
@@ -107,7 +108,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingDataSet other) {
 		Preconditions.checkNotNull(other);
 
-		if (! LocalClusteringCoefficient.class.isAssignableFrom(other.getClass())) {
+		if (!LocalClusteringCoefficient.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 
@@ -257,7 +258,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 	implements PrintableResult, UnaryResult<T> {
 		public static final int HASH_SEED = 0x37a208c4;
 
-		private Murmur3_32 hasher = new Murmur3_32(HASH_SEED);
+		private MurmurHash hasher = new MurmurHash(HASH_SEED);
 
 		@Override
 		public T getVertexId0() {
@@ -293,7 +294,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 		 * number of edges between neighbors, equal to the triangle count,
 		 * divided by the number of potential edges between neighbors.
 		 *
-		 * A score of {@code Double.NaN} is returned for a vertex with degree 1
+		 * <p>A score of {@code Double.NaN} is returned for a vertex with degree 1
 		 * for which both the triangle count and number of neighbors are zero.
 		 *
 		 * @return local clustering coefficient score
@@ -302,7 +303,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 			long degree = getDegree().getValue();
 			long neighborPairs = degree * (degree - 1);
 
-			return (neighborPairs == 0) ? Double.NaN : getTriangleCount().getValue() / (double)neighborPairs;
+			return (neighborPairs == 0) ? Double.NaN : getTriangleCount().getValue() / (double) neighborPairs;
 		}
 
 		/**

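To make the score definition above concrete, here is a worked instance of the same arithmetic as getLocalClusteringCoefficientScore, standalone and with invented numbers (not part of the commit):

public class LocalClusteringCoefficientArithmetic {
	public static void main(String[] args) {
		// In the directed case a vertex of degree 4 has 4 * 3 = 12
		// ordered neighbor pairs.
		long degree = 4;
		long neighborPairs = degree * (degree - 1);

		// Suppose 6 of those pairs are joined by an edge, i.e. the
		// vertex participates in 6 triangles.
		long triangleCount = 6;

		double score = (neighborPairs == 0)
			? Double.NaN
			: triangleCount / (double) neighborPairs;

		System.out.println(score); // 0.5
	}
}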

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/linkanalysis/HITS.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/linkanalysis/HITS.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/linkanalysis/HITS.java
new file mode 100644
index 0000000..e8422ac
--- /dev/null
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/linkanalysis/HITS.java
@@ -0,0 +1,582 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.graph.library.linkanalysis;
+
+import org.apache.flink.api.common.aggregators.ConvergenceCriterion;
+import org.apache.flink.api.common.aggregators.DoubleSumAggregator;
+import org.apache.flink.api.common.functions.CoGroupFunction;
+import org.apache.flink.api.common.functions.MapFunction;
+import org.apache.flink.api.common.functions.ReduceFunction;
+import org.apache.flink.api.common.functions.RichJoinFunction;
+import org.apache.flink.api.common.operators.base.JoinOperatorBase.JoinHint;
+import org.apache.flink.api.common.operators.base.ReduceOperatorBase.CombineHint;
+import org.apache.flink.api.java.DataSet;
+import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFields;
+import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFieldsFirst;
+import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFieldsSecond;
+import org.apache.flink.api.java.operators.IterativeDataSet;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.api.java.tuple.Tuple3;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.graph.Edge;
+import org.apache.flink.graph.Graph;
+import org.apache.flink.graph.asm.result.PrintableResult;
+import org.apache.flink.graph.asm.result.UnaryResult;
+import org.apache.flink.graph.library.linkanalysis.Functions.SumScore;
+import org.apache.flink.graph.library.linkanalysis.HITS.Result;
+import org.apache.flink.graph.utils.MurmurHash;
+import org.apache.flink.graph.utils.proxy.GraphAlgorithmWrappingDataSet;
+import org.apache.flink.types.DoubleValue;
+import org.apache.flink.util.Collector;
+import org.apache.flink.util.Preconditions;
+
+import java.util.Collection;
+
+import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
+
+/**
+ * Hyperlink-Induced Topic Search computes two interdependent scores for every
+ * vertex in a directed graph. A good "hub" links to good "authorities" and
+ * good "authorities" are linked from good "hubs".
+ *
+ * <p>This algorithm can be configured to terminate either by a limit on the number
+ * of iterations, a convergence threshold, or both.
+ *
+ * <p>See http://www.cs.cornell.edu/home/kleinber/auth.pdf
+ *
+ * @param <K> graph ID type
+ * @param <VV> vertex value type
+ * @param <EV> edge value type
+ */
+public class HITS<K, VV, EV>
+extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
+
+	private static final String CHANGE_IN_SCORES = "change in scores";
+
+	private static final String HUBBINESS_SUM_SQUARED = "hubbiness sum squared";
+
+	private static final String AUTHORITY_SUM_SQUARED = "authority sum squared";
+
+	// Required configuration
+	private int maxIterations;
+
+	private double convergenceThreshold;
+
+	// Optional configuration
+	private int parallelism = PARALLELISM_DEFAULT;
+
+	/**
+	 * Hyperlink-Induced Topic Search with a fixed number of iterations.
+	 *
+	 * @param iterations fixed number of iterations
+	 */
+	public HITS(int iterations) {
+		this(iterations, Double.MAX_VALUE);
+	}
+
+	/**
+	 * Hyperlink-Induced Topic Search with a convergence threshold. The algorithm
+	 * terminates when the total change in hub and authority scores over all
+	 * vertices falls to or below the given threshold value.
+	 *
+	 * @param convergenceThreshold convergence threshold for sum of scores
+	 */
+	public HITS(double convergenceThreshold) {
+		this(Integer.MAX_VALUE, convergenceThreshold);
+	}
+
+	/**
+	 * Hyperlink-Induced Topic Search with a convergence threshold and a maximum
+	 * iteration count. The algorithm terminates after either the given number
+	 * of iterations or when the total change in hub and authority scores over all
+	 * vertices falls to or below the given threshold value.
+	 *
+	 * @param maxIterations maximum number of iterations
+	 * @param convergenceThreshold convergence threshold for sum of scores
+	 */
+	public HITS(int maxIterations, double convergenceThreshold) {
+		Preconditions.checkArgument(maxIterations > 0, "Number of iterations must be greater than zero");
+		Preconditions.checkArgument(convergenceThreshold > 0.0, "Convergence threshold must be greater than zero");
+
+		this.maxIterations = maxIterations;
+		this.convergenceThreshold = convergenceThreshold;
+	}
+
+	/**
+	 * Override the operator parallelism.
+	 *
+	 * @param parallelism operator parallelism
+	 * @return this
+	 */
+	public HITS<K, VV, EV> setParallelism(int parallelism) {
+		this.parallelism = parallelism;
+
+		return this;
+	}
+
+	@Override
+	protected String getAlgorithmName() {
+		return HITS.class.getName();
+	}
+
+	@Override
+	protected boolean mergeConfiguration(GraphAlgorithmWrappingDataSet other) {
+		Preconditions.checkNotNull(other);
+
+		if (!HITS.class.isAssignableFrom(other.getClass())) {
+			return false;
+		}
+
+		HITS rhs = (HITS) other;
+
+		// merge configurations
+
+		maxIterations = Math.max(maxIterations, rhs.maxIterations);
+		convergenceThreshold = Math.min(convergenceThreshold, rhs.convergenceThreshold);
+		parallelism = (parallelism == PARALLELISM_DEFAULT) ? rhs.parallelism :
+			((rhs.parallelism == PARALLELISM_DEFAULT) ? parallelism : Math.min(parallelism, rhs.parallelism));
+
+		return true;
+	}
+
+	@Override
+	public DataSet<Result<K>> runInternal(Graph<K, VV, EV> input)
+			throws Exception {
+		DataSet<Tuple2<K, K>> edges = input
+			.getEdges()
+			.map(new ExtractEdgeIDs<K, EV>())
+				.setParallelism(parallelism)
+				.name("Extract edge IDs");
+
+		// ID, hub, authority
+		DataSet<Tuple3<K, DoubleValue, DoubleValue>> initialScores = edges
+			.map(new InitializeScores<K>())
+				.setParallelism(parallelism)
+				.name("Initial scores")
+			.groupBy(0)
+			.reduce(new SumScores<K>())
+			.setCombineHint(CombineHint.HASH)
+				.setParallelism(parallelism)
+				.name("Sum");
+
+		IterativeDataSet<Tuple3<K, DoubleValue, DoubleValue>> iterative = initialScores
+			.iterate(maxIterations);
+
+		// ID, hubbiness
+		DataSet<Tuple2<K, DoubleValue>> hubbiness = iterative
+			.coGroup(edges)
+			.where(0)
+			.equalTo(1)
+			.with(new Hubbiness<K>())
+				.setParallelism(parallelism)
+				.name("Hub")
+			.groupBy(0)
+			.reduce(new SumScore<K>())
+			.setCombineHint(CombineHint.HASH)
+				.setParallelism(parallelism)
+				.name("Sum");
+
+		// sum-of-hubbiness-squared
+		DataSet<DoubleValue> hubbinessSumSquared = hubbiness
+			.map(new Square<K>())
+				.setParallelism(parallelism)
+				.name("Square")
+			.reduce(new Sum())
+			.setCombineHint(CombineHint.HASH)
+				.setParallelism(parallelism)
+				.name("Sum");
+
+		// ID, new authority
+		DataSet<Tuple2<K, DoubleValue>> authority = hubbiness
+			.coGroup(edges)
+			.where(0)
+			.equalTo(0)
+			.with(new Authority<K>())
+				.setParallelism(parallelism)
+				.name("Authority")
+			.groupBy(0)
+			.reduce(new SumScore<K>())
+			.setCombineHint(CombineHint.HASH)
+				.setParallelism(parallelism)
+				.name("Sum");
+
+		// sum-of-authority-squared
+		DataSet<DoubleValue> authoritySumSquared = authority
+			.map(new Square<K>())
+				.setParallelism(parallelism)
+				.name("Square")
+			.reduce(new Sum())
+			.setCombineHint(CombineHint.HASH)
+				.setParallelism(parallelism)
+				.name("Sum");
+
+		// ID, normalized hubbiness, normalized authority
+		DataSet<Tuple3<K, DoubleValue, DoubleValue>> scores = hubbiness
+			.fullOuterJoin(authority, JoinHint.REPARTITION_SORT_MERGE)
+			.where(0)
+			.equalTo(0)
+			.with(new JoinAndNormalizeHubAndAuthority<K>())
+			.withBroadcastSet(hubbinessSumSquared, HUBBINESS_SUM_SQUARED)
+			.withBroadcastSet(authoritySumSquared, AUTHORITY_SUM_SQUARED)
+				.setParallelism(parallelism)
+				.name("Join scores");
+
+		DataSet<Tuple3<K, DoubleValue, DoubleValue>> passThrough;
+
+		if (convergenceThreshold < Double.MAX_VALUE) {
+			passThrough = iterative
+				.fullOuterJoin(scores, JoinHint.REPARTITION_SORT_MERGE)
+				.where(0)
+				.equalTo(0)
+				.with(new ChangeInScores<K>())
+					.setParallelism(parallelism)
+					.name("Change in scores");
+
+			iterative.registerAggregationConvergenceCriterion(CHANGE_IN_SCORES, new DoubleSumAggregator(), new ScoreConvergence(convergenceThreshold));
+		} else {
+			passThrough = scores;
+		}
+
+		return iterative
+			.closeWith(passThrough)
+			.map(new TranslateResult<K>())
+				.setParallelism(parallelism)
+				.name("Map result");
+	}
+
+	/**
+	 * Map edges and remove the edge value.
+	 *
+	 * @param <T> ID type
+	 * @param <ET> edge value type
+	 *
+	 * @see Graph.ExtractEdgeIDsMapper
+	 */
+	@ForwardedFields("0; 1")
+	private static class ExtractEdgeIDs<T, ET>
+	implements MapFunction<Edge<T, ET>, Tuple2<T, T>> {
+		private Tuple2<T, T> output = new Tuple2<>();
+
+		@Override
+		public Tuple2<T, T> map(Edge<T, ET> value)
+				throws Exception {
+			output.f0 = value.f0;
+			output.f1 = value.f1;
+			return output;
+		}
+	}
+
+	/**
+	 * Initialize vertices' scores by assigning each vertex an initial
+	 * authority score of 1.0. The hub scores are initialized to zero since
+	 * they will be computed from the initial authority scores.
+	 *
+	 * <p>The initial scores are non-normalized.
+	 *
+	 * @param <T> ID type
+	 */
+	@ForwardedFields("1->0")
+	private static class InitializeScores<T>
+	implements MapFunction<Tuple2<T, T>, Tuple3<T, DoubleValue, DoubleValue>> {
+		private Tuple3<T, DoubleValue, DoubleValue> output = new Tuple3<>(null, new DoubleValue(0.0), new DoubleValue(1.0));
+
+		@Override
+		public Tuple3<T, DoubleValue, DoubleValue> map(Tuple2<T, T> value) throws Exception {
+			output.f0 = value.f1;
+			return output;
+		}
+	}
+
+	/**
+	 * Sum vertices' hub and authority scores.
+	 *
+	 * @param <T> ID type
+	 */
+	@ForwardedFields("0")
+	private static class SumScores<T>
+	implements ReduceFunction<Tuple3<T, DoubleValue, DoubleValue>> {
+		@Override
+		public Tuple3<T, DoubleValue, DoubleValue> reduce(Tuple3<T, DoubleValue, DoubleValue> left, Tuple3<T, DoubleValue, DoubleValue> right)
+				throws Exception {
+			left.f1.setValue(left.f1.getValue() + right.f1.getValue());
+			left.f2.setValue(left.f2.getValue() + right.f2.getValue());
+			return left;
+		}
+	}
+
+	/**
+	 * The hub score is the sum of authority scores of vertices on out-edges.
+	 *
+	 * @param <T> ID type
+	 */
+	@ForwardedFieldsFirst("2->1")
+	@ForwardedFieldsSecond("0")
+	private static class Hubbiness<T>
+	implements CoGroupFunction<Tuple3<T, DoubleValue, DoubleValue>, Tuple2<T, T>, Tuple2<T, DoubleValue>> {
+		private Tuple2<T, DoubleValue> output = new Tuple2<>();
+
+		@Override
+		public void coGroup(Iterable<Tuple3<T, DoubleValue, DoubleValue>> vertex, Iterable<Tuple2<T, T>> edges, Collector<Tuple2<T, DoubleValue>> out)
+				throws Exception {
+			output.f1 = vertex.iterator().next().f2;
+
+			for (Tuple2<T, T> edge : edges) {
+				output.f0 = edge.f0;
+				out.collect(output);
+			}
+		}
+	}
+
+	/**
+	 * The authority score is the sum of hub scores of vertices on in-edges.
+	 *
+	 * @param <T> ID type
+	 */
+	@ForwardedFieldsFirst("1")
+	@ForwardedFieldsSecond("1->0")
+	private static class Authority<T>
+	implements CoGroupFunction<Tuple2<T, DoubleValue>, Tuple2<T, T>, Tuple2<T, DoubleValue>> {
+		private Tuple2<T, DoubleValue> output = new Tuple2<>();
+
+		@Override
+		public void coGroup(Iterable<Tuple2<T, DoubleValue>> vertex, Iterable<Tuple2<T, T>> edges, Collector<Tuple2<T, DoubleValue>> out)
+				throws Exception {
+			output.f1 = vertex.iterator().next().f1;
+
+			for (Tuple2<T, T> edge : edges) {
+				output.f0 = edge.f1;
+				out.collect(output);
+			}
+		}
+	}
+
+	/**
+	 * Compute the square of each score.
+	 *
+	 * @param <T> ID type
+	 */
+	private static class Square<T>
+	implements MapFunction<Tuple2<T, DoubleValue>, DoubleValue> {
+		private DoubleValue output = new DoubleValue();
+
+		@Override
+		public DoubleValue map(Tuple2<T, DoubleValue> value)
+				throws Exception {
+			double val = value.f1.getValue();
+			output.setValue(val * val);
+
+			return output;
+		}
+	}
+
+	/**
+	 * Sum over values. This specialized function is used in place of generic aggregation.
+	 */
+	private static class Sum
+	implements ReduceFunction<DoubleValue> {
+		@Override
+		public DoubleValue reduce(DoubleValue first, DoubleValue second)
+				throws Exception {
+			first.setValue(first.getValue() + second.getValue());
+			return first;
+		}
+	}
+
+	/**
+	 * Join and normalize the hub and authority scores.
+	 *
+	 * @param <T> ID type
+	 */
+	@ForwardedFieldsFirst("0")
+	@ForwardedFieldsSecond("0")
+	private static class JoinAndNormalizeHubAndAuthority<T>
+	extends RichJoinFunction<Tuple2<T, DoubleValue>, Tuple2<T, DoubleValue>, Tuple3<T, DoubleValue, DoubleValue>> {
+		private Tuple3<T, DoubleValue, DoubleValue> output = new Tuple3<>(null, new DoubleValue(), new DoubleValue());
+
+		private double hubbinessRootSumSquared;
+
+		private double authorityRootSumSquared;
+
+		@Override
+		public void open(Configuration parameters) throws Exception {
+			super.open(parameters);
+
+			Collection<DoubleValue> var;
+			var = getRuntimeContext().getBroadcastVariable(HUBBINESS_SUM_SQUARED);
+			hubbinessRootSumSquared = Math.sqrt(var.iterator().next().getValue());
+
+			var = getRuntimeContext().getBroadcastVariable(AUTHORITY_SUM_SQUARED);
+			authorityRootSumSquared = Math.sqrt(var.iterator().next().getValue());
+		}
+
+		@Override
+		public Tuple3<T, DoubleValue, DoubleValue> join(Tuple2<T, DoubleValue> hubbiness, Tuple2<T, DoubleValue> authority)
+				throws Exception {
+			output.f0 = (authority == null) ? hubbiness.f0 : authority.f0;
+			output.f1.setValue(hubbiness == null ? 0.0 : hubbiness.f1.getValue() / hubbinessRootSumSquared);
+			output.f2.setValue(authority == null ? 0.0 : authority.f1.getValue() / authorityRootSumSquared);
+			return output;
+		}
+	}
+
+	/**
+	 * Computes the total sum of the change in hub and authority scores over
+	 * all vertices between iterations. A negative score is emitted after the
+	 * first iteration to prevent premature convergence.
+	 *
+	 * @param <T> ID type
+	 */
+	@ForwardedFieldsFirst("0")
+	@ForwardedFieldsSecond("*")
+	private static class ChangeInScores<T>
+	extends RichJoinFunction<Tuple3<T, DoubleValue, DoubleValue>, Tuple3<T, DoubleValue, DoubleValue>, Tuple3<T, DoubleValue, DoubleValue>> {
+		private boolean isInitialSuperstep;
+
+		private double changeInScores;
+
+		@Override
+		public void open(Configuration parameters)
+				throws Exception {
+			super.open(parameters);
+
+			isInitialSuperstep = (getIterationRuntimeContext().getSuperstepNumber() == 1);
+			changeInScores = (isInitialSuperstep) ? -1.0 : 0.0;
+		}
+
+		@Override
+		public void close()
+				throws Exception {
+			super.close();
+
+			DoubleSumAggregator agg = getIterationRuntimeContext().getIterationAggregator(CHANGE_IN_SCORES);
+			agg.aggregate(changeInScores);
+		}
+
+		@Override
+		public Tuple3<T, DoubleValue, DoubleValue> join(Tuple3<T, DoubleValue, DoubleValue> first, Tuple3<T, DoubleValue, DoubleValue> second)
+				throws Exception {
+			if (!isInitialSuperstep) {
+				changeInScores += Math.abs(second.f1.getValue() - first.f1.getValue());
+				changeInScores += Math.abs(second.f2.getValue() - first.f2.getValue());
+			}
+
+			return second;
+		}
+	}
+
+	/**
+	 * Monitors the total change in hub and authority scores over all vertices.
+	 * The algorithm terminates when the change in scores compared against the
+	 * prior iteration falls to or below the given convergence threshold.
+	 *
+	 * <p>An optimization of this implementation of HITS is to leave the initial
+	 * scores non-normalized; therefore, the change in scores after the first
+	 * superstep cannot be measured and a negative value is emitted to signal
+	 * that the iteration should continue.
+	 */
+	private static class ScoreConvergence
+	implements ConvergenceCriterion<DoubleValue> {
+		private double convergenceThreshold;
+
+		public ScoreConvergence(double convergenceThreshold) {
+			this.convergenceThreshold = convergenceThreshold;
+		}
+
+		@Override
+		public boolean isConverged(int iteration, DoubleValue value) {
+			double val = value.getValue();
+			return (0 <= val && val <= convergenceThreshold);
+		}
+	}
+
+	/**
+	 * Map the Tuple result to the return type.
+	 *
+	 * @param <T> ID type
+	 */
+	@ForwardedFields("0; 1; 2")
+	private static class TranslateResult<T>
+	implements MapFunction<Tuple3<T, DoubleValue, DoubleValue>, Result<T>> {
+		private Result<T> output = new Result<>();
+
+		@Override
+		public Result<T> map(Tuple3<T, DoubleValue, DoubleValue> value) throws Exception {
+			output.f0 = value.f0;
+			output.f1 = value.f1;
+			output.f2 = value.f2;
+			return output;
+		}
+	}
+
+	/**
+	 * Wraps the {@link Tuple3} to encapsulate results from the HITS algorithm.
+	 *
+	 * @param <T> ID type
+	 */
+	public static class Result<T>
+	extends Tuple3<T, DoubleValue, DoubleValue>
+	implements PrintableResult, UnaryResult<T> {
+		public static final int HASH_SEED = 0xc7e39a63;
+
+		private MurmurHash hasher = new MurmurHash(HASH_SEED);
+
+		@Override
+		public T getVertexId0() {
+			return f0;
+		}
+
+		@Override
+		public void setVertexId0(T value) {
+			f0 = value;
+		}
+
+		/**
+		 * Get the hub score. Good hubs link to good authorities.
+		 *
+		 * @return the hub score
+		 */
+		public DoubleValue getHubScore() {
+			return f1;
+		}
+
+		/**
+		 * Get the authority score. Good authorities link to good hubs.
+		 *
+		 * @return the authority score
+		 */
+		public DoubleValue getAuthorityScore() {
+			return f2;
+		}
+
+		@Override
+		public String toPrintableString() {
+			return "Vertex ID: " + getVertexId0()
+				+ ", hub score: " + getHubScore()
+				+ ", authority score: " + getAuthorityScore();
+		}
+
+		@Override
+		public int hashCode() {
+			return hasher.reset()
+				.hash(f0.hashCode())
+				.hash(f1.getValue())
+				.hash(f2.getValue())
+				.hash();
+		}
+	}
+}

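Not part of the commit, but a rough usage sketch of the class above may help; the edge data and parameter values are invented, and it assumes Graph.fromTuple2DataSet behaves as at this revision:

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.library.linkanalysis.HITS;
import org.apache.flink.types.NullValue;

public class HitsExample {
	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		// Toy directed edge list: vertices 0 and 1 both link to 2.
		DataSet<Tuple2<Long, Long>> edges = env.fromElements(
			Tuple2.of(0L, 2L),
			Tuple2.of(1L, 2L),
			Tuple2.of(2L, 0L));

		Graph<Long, NullValue, NullValue> graph =
			Graph.fromTuple2DataSet(edges, env);

		// Stop after at most 10 iterations, or earlier once the summed
		// change in hub and authority scores falls to 1e-6 or below.
		// Recall from the Javadoc above that the first superstep emits
		// a negative change on purpose, so convergence is never
		// declared prematurely.
		DataSet<HITS.Result<Long>> scores = graph
			.run(new HITS<Long, NullValue, NullValue>(10, 1.0e-6));

		scores.print();
	}
}
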
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/linkanalysis/PageRank.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/linkanalysis/PageRank.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/linkanalysis/PageRank.java
new file mode 100644
index 0000000..ecd5f39
--- /dev/null
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/linkanalysis/PageRank.java
@@ -0,0 +1,544 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.graph.library.linkanalysis;
+
+import org.apache.flink.api.common.aggregators.ConvergenceCriterion;
+import org.apache.flink.api.common.aggregators.DoubleSumAggregator;
+import org.apache.flink.api.common.functions.CoGroupFunction;
+import org.apache.flink.api.common.functions.FlatMapFunction;
+import org.apache.flink.api.common.functions.MapFunction;
+import org.apache.flink.api.common.functions.ReduceFunction;
+import org.apache.flink.api.common.functions.RichJoinFunction;
+import org.apache.flink.api.common.functions.RichMapFunction;
+import org.apache.flink.api.common.operators.base.ReduceOperatorBase.CombineHint;
+import org.apache.flink.api.java.DataSet;
+import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFields;
+import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFieldsFirst;
+import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFieldsSecond;
+import org.apache.flink.api.java.operators.IterativeDataSet;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.graph.Edge;
+import org.apache.flink.graph.Graph;
+import org.apache.flink.graph.Vertex;
+import org.apache.flink.graph.asm.degree.annotate.directed.EdgeSourceDegrees;
+import org.apache.flink.graph.asm.degree.annotate.directed.VertexDegrees;
+import org.apache.flink.graph.asm.degree.annotate.directed.VertexDegrees.Degrees;
+import org.apache.flink.graph.asm.result.PrintableResult;
+import org.apache.flink.graph.asm.result.UnaryResult;
+import org.apache.flink.graph.library.linkanalysis.Functions.SumScore;
+import org.apache.flink.graph.library.linkanalysis.PageRank.Result;
+import org.apache.flink.graph.utils.GraphUtils;
+import org.apache.flink.graph.utils.MurmurHash;
+import org.apache.flink.graph.utils.proxy.GraphAlgorithmWrappingDataSet;
+import org.apache.flink.types.DoubleValue;
+import org.apache.flink.types.LongValue;
+import org.apache.flink.util.Collector;
+import org.apache.flink.util.Preconditions;
+
+import java.util.Collection;
+import java.util.Iterator;
+
+import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
+
+/**
+ * PageRank computes a per-vertex score which is the sum of PageRank scores
+ * transmitted over in-edges. Each vertex's score is divided evenly among
+ * out-edges. High-scoring vertices are linked to by other high-scoring
+ * vertices; this is similar to the 'authority' score in {@link HITS}.
+ *
+ * <p>See http://ilpubs.stanford.edu:8090/422/1/1999-66.pdf
+ *
+ * @param <K> graph ID type
+ * @param <VV> vertex value type
+ * @param <EV> edge value type
+ */
+public class PageRank<K, VV, EV>
+extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
+
+	private static final String VERTEX_COUNT = "vertex count";
+
+	private static final String SUM_OF_SCORES = "sum of scores";
+
+	private static final String CHANGE_IN_SCORES = "change in scores";
+
+	// Required configuration
+	private final double dampingFactor;
+
+	private int maxIterations;
+
+	private double convergenceThreshold;
+
+	// Optional configuration
+	private int parallelism = PARALLELISM_DEFAULT;
+
+	/**
+	 * PageRank with a fixed number of iterations.
+	 *
+	 * @param dampingFactor probability of following an out-link, otherwise jump to a random vertex
+	 * @param iterations fixed number of iterations
+	 */
+	public PageRank(double dampingFactor, int iterations) {
+		this(dampingFactor, iterations, Double.MAX_VALUE);
+	}
+
+	/**
+	 * PageRank with a convergence threshold. The algorithm terminates when the
+	 * change in score over all vertices falls to or below the given threshold value.
+	 *
+	 * @param dampingFactor probability of following an out-link, otherwise jump to a random vertex
+	 * @param convergenceThreshold convergence threshold for sum of scores
+	 */
+	public PageRank(double dampingFactor, double convergenceThreshold) {
+		this(dampingFactor, Integer.MAX_VALUE, convergenceThreshold);
+	}
+
+	/**
+	 * PageRank with a convergence threshold and a maximum iteration count. The
+	 * algorithm terminates after either the given number of iterations or when
+	 * the change in score over all vertices falls to or below the given
+	 * threshold value.
+	 *
+	 * @param dampingFactor probability of following an out-link, otherwise jump to a random vertex
+	 * @param maxIterations maximum number of iterations
+	 * @param convergenceThreshold convergence threshold for sum of scores
+	 */
+	public PageRank(double dampingFactor, int maxIterations, double convergenceThreshold) {
+		Preconditions.checkArgument(0 < dampingFactor && dampingFactor < 1,
+			"Damping factor must be between zero and one");
+		Preconditions.checkArgument(maxIterations > 0, "Number of iterations must be greater than zero");
+		Preconditions.checkArgument(convergenceThreshold > 0.0, "Convergence threshold must be greater than zero");
+
+		this.dampingFactor = dampingFactor;
+		this.maxIterations = maxIterations;
+		this.convergenceThreshold = convergenceThreshold;
+	}
+
+	/**
+	 * Override the operator parallelism.
+	 *
+	 * @param parallelism operator parallelism
+	 * @return this
+	 */
+	public PageRank<K, VV, EV> setParallelism(int parallelism) {
+		this.parallelism = parallelism;
+
+		return this;
+	}
+
+	@Override
+	protected String getAlgorithmName() {
+		return PageRank.class.getName();
+	}
+
+	@Override
+	protected boolean mergeConfiguration(GraphAlgorithmWrappingDataSet other) {
+		Preconditions.checkNotNull(other);
+
+		if (!PageRank.class.isAssignableFrom(other.getClass())) {
+			return false;
+		}
+
+		PageRank rhs = (PageRank) other;
+
+		// merge configurations
+
+		maxIterations = Math.max(maxIterations, rhs.maxIterations);
+		convergenceThreshold = Math.min(convergenceThreshold, rhs.convergenceThreshold);
+		parallelism = (parallelism == PARALLELISM_DEFAULT) ? rhs.parallelism :
+			((rhs.parallelism == PARALLELISM_DEFAULT) ? parallelism : Math.min(parallelism, rhs.parallelism));
+
+		return true;
+	}
+
+	@Override
+	public DataSet<Result<K>> runInternal(Graph<K, VV, EV> input)
+			throws Exception {
+		// vertex degree
+		DataSet<Vertex<K, Degrees>> vertexDegree = input
+			.run(new VertexDegrees<K, VV, EV>()
+				.setParallelism(parallelism));
+
+		// vertex count
+		DataSet<LongValue> vertexCount = GraphUtils.count(vertexDegree);
+
+		// s, t, d(s)
+		DataSet<Edge<K, LongValue>> edgeSourceDegree = input
+			.run(new EdgeSourceDegrees<K, VV, EV>()
+				.setParallelism(parallelism))
+			.map(new ExtractSourceDegree<K, EV>())
+				.setParallelism(parallelism)
+				.name("Extract source degree");
+
+		// vertices with zero in-edges
+		DataSet<Tuple2<K, DoubleValue>> sourceVertices = vertexDegree
+			.flatMap(new InitializeSourceVertices<K>())
+			.withBroadcastSet(vertexCount, VERTEX_COUNT)
+				.setParallelism(parallelism)
+				.name("Initialize source vertex scores");
+
+		// s, initial pagerank(s)
+		DataSet<Tuple2<K, DoubleValue>> initialScores = vertexDegree
+			.map(new InitializeVertexScores<K>())
+			.withBroadcastSet(vertexCount, VERTEX_COUNT)
+				.setParallelism(parallelism)
+				.name("Initialize scores");
+
+		IterativeDataSet<Tuple2<K, DoubleValue>> iterative = initialScores
+			.iterate(maxIterations);
+
+		// s, projected pagerank(s)
+		DataSet<Tuple2<K, DoubleValue>> vertexScores = iterative
+			.coGroup(edgeSourceDegree)
+			.where(0)
+			.equalTo(0)
+			.with(new SendScore<K>())
+				.setParallelism(parallelism)
+				.name("Send score")
+			.groupBy(0)
+			.reduce(new SumScore<K>())
+			.setCombineHint(CombineHint.HASH)
+				.setParallelism(parallelism)
+				.name("Sum");
+
+		// ignored ID, total pagerank
+		DataSet<Tuple2<K, DoubleValue>> sumOfScores = vertexScores
+			.reduce(new SumVertexScores<K>())
+				.setParallelism(parallelism)
+				.name("Sum");
+
+		// s, adjusted pagerank(s)
+		DataSet<Tuple2<K, DoubleValue>> adjustedScores = vertexScores
+			.union(sourceVertices)
+				.setParallelism(parallelism)
+				.name("Union with source vertices")
+			.map(new AdjustScores<K>(dampingFactor))
+				.withBroadcastSet(sumOfScores, SUM_OF_SCORES)
+				.withBroadcastSet(vertexCount, VERTEX_COUNT)
+					.setParallelism(parallelism)
+					.name("Adjust scores");
+
+		DataSet<Tuple2<K, DoubleValue>> passThrough;
+
+		if (convergenceThreshold < Double.MAX_VALUE) {
+			passThrough = iterative
+				.join(adjustedScores)
+				.where(0)
+				.equalTo(0)
+				.with(new ChangeInScores<K>())
+					.setParallelism(parallelism)
+					.name("Change in scores");
+
+			iterative.registerAggregationConvergenceCriterion(CHANGE_IN_SCORES, new DoubleSumAggregator(), new ScoreConvergence(convergenceThreshold));
+		} else {
+			passThrough = adjustedScores;
+		}
+
+		return iterative
+			.closeWith(passThrough)
+			.map(new TranslateResult<K>())
+				.setParallelism(parallelism)
+				.name("Map result");
+	}
+
+	/**
+	 * Remove the unused original edge value and extract the out-degree.
+	 *
+	 * @param <T> ID type
+	 * @param <ET> edge value type
+	 */
+	@ForwardedFields("0; 1")
+	private static class ExtractSourceDegree<T, ET>
+	implements MapFunction<Edge<T, Tuple2<ET, Degrees>>, Edge<T, LongValue>> {
+		Edge<T, LongValue> output = new Edge<>();
+
+		@Override
+		public Edge<T, LongValue> map(Edge<T, Tuple2<ET, Degrees>> edge)
+				throws Exception {
+			output.f0 = edge.f0;
+			output.f1 = edge.f1;
+			output.f2 = edge.f2.f1.getOutDegree();
+			return output;
+		}
+	}
+
+	/**
+	 * Source vertices have no in-edges and therefore have a projected score of 0.0.
+	 *
+	 * @param <T> ID type
+	 */
+	@ForwardedFields("0")
+	private static class InitializeSourceVertices<T>
+	implements FlatMapFunction<Vertex<T, Degrees>, Tuple2<T, DoubleValue>> {
+		private Tuple2<T, DoubleValue> output = new Tuple2<>(null, new DoubleValue(0.0));
+
+		@Override
+		public void flatMap(Vertex<T, Degrees> vertex, Collector<Tuple2<T, DoubleValue>> out)
+				throws Exception {
+			if (vertex.f1.getInDegree().getValue() == 0) {
+				output.f0 = vertex.f0;
+				out.collect(output);
+			}
+		}
+	}
+
+	/**
+	 * PageRank scores sum to 1.0 so initialize each vertex with the inverse of
+	 * the number of vertices.
+	 *
+	 * @param <T> ID type
+	 */
+	@ForwardedFields("0")
+	private static class InitializeVertexScores<T>
+	extends RichMapFunction<Vertex<T, Degrees>, Tuple2<T, DoubleValue>> {
+		private Tuple2<T, DoubleValue> output = new Tuple2<>();
+
+		@Override
+		public void open(Configuration parameters)
+				throws Exception {
+			super.open(parameters);
+
+			Collection<LongValue> vertexCount = getRuntimeContext().getBroadcastVariable(VERTEX_COUNT);
+			output.f1 = new DoubleValue(1.0 / vertexCount.iterator().next().getValue());
+		}
+
+		@Override
+		public Tuple2<T, DoubleValue> map(Vertex<T, Degrees> vertex)
+				throws Exception {
+			output.f0 = vertex.f0;
+			return output;
+		}
+	}
+
+	/**
+	 * The PageRank score for each vertex is divided evenly and projected to
+	 * neighbors on out-edges.
+	 *
+	 * @param <T> ID type
+	 */
+	@ForwardedFieldsSecond("1->0")
+	private static class SendScore<T>
+	implements CoGroupFunction<Tuple2<T, DoubleValue>, Edge<T, LongValue>, Tuple2<T, DoubleValue>> {
+		private Tuple2<T, DoubleValue> output = new Tuple2<>(null, new DoubleValue());
+
+		@Override
+		public void coGroup(Iterable<Tuple2<T, DoubleValue>> vertex, Iterable<Edge<T, LongValue>> edges, Collector<Tuple2<T, DoubleValue>> out)
+				throws Exception {
+			Iterator<Edge<T, LongValue>> edgeIterator = edges.iterator();
+
+			if (edgeIterator.hasNext()) {
+				Edge<T, LongValue> edge = edgeIterator.next();
+
+				output.f0 = edge.f1;
+				output.f1.setValue(vertex.iterator().next().f1.getValue() / edge.f2.getValue());
+				out.collect(output);
+
+				while (edgeIterator.hasNext()) {
+					edge = edgeIterator.next();
+					output.f0 = edge.f1;
+					out.collect(output);
+				}
+			}
+		}
+	}
+
+	/**
+	 * Sum the PageRank score over all vertices. The vertex ID in the result
+	 * is meaningless but is retained to avoid adding another operator.
+	 *
+	 * @param <T> ID type
+	 */
+	@ForwardedFields("0")
+	private static class SumVertexScores<T>
+	implements ReduceFunction<Tuple2<T, DoubleValue>> {
+		@Override
+		public Tuple2<T, DoubleValue> reduce(Tuple2<T, DoubleValue> first, Tuple2<T, DoubleValue> second)
+				throws Exception {
+			first.f1.setValue(first.f1.getValue() + second.f1.getValue());
+			return first;
+		}
+	}
+
+	/**
+	 * Each iteration the per-vertex scores are adjusted with the damping
+	 * factor. Each score is multiplied by the damping factor then added to the
+	 * probability of a "random hop", which is one minus the damping factor.
+	 *
+	 * <p>This operation also accounts for 'sink' vertices, which have no
+	 * out-edges to project score to. The sink scores are computed by taking
+	 * one minus the sum of vertex scores, which also includes precision error.
+	 * This 'missing' score is evenly distributed across vertices as with the
+	 * random hop.
+	 *
+	 * @param <T> ID type
+	 */
+	@ForwardedFields("0")
+	private static class AdjustScores<T>
+	extends RichMapFunction<Tuple2<T, DoubleValue>, Tuple2<T, DoubleValue>> {
+		private double dampingFactor;
+
+		private long vertexCount;
+
+		private double uniformlyDistributedScore;
+
+		public AdjustScores(double dampingFactor) {
+			this.dampingFactor = dampingFactor;
+		}
+
+		@Override
+		public void open(Configuration parameters) throws Exception {
+			super.open(parameters);
+
+			Collection<Tuple2<T, DoubleValue>> sumOfScores = getRuntimeContext().getBroadcastVariable(SUM_OF_SCORES);
+			// floating point precision error is also included in sumOfSinks
+			double sumOfSinks = 1 - sumOfScores.iterator().next().f1.getValue();
+
+			Collection<LongValue> vertexCount = getRuntimeContext().getBroadcastVariable(VERTEX_COUNT);
+			this.vertexCount = vertexCount.iterator().next().getValue();
+
+			this.uniformlyDistributedScore = ((1 - dampingFactor) + dampingFactor * sumOfSinks) / this.vertexCount;
+		}
+
+		@Override
+		public Tuple2<T, DoubleValue> map(Tuple2<T, DoubleValue> value) throws Exception {
+			value.f1.setValue(uniformlyDistributedScore + (dampingFactor * value.f1.getValue()));
+			return value;
+		}
+	}
+
+	/**
+	 * Computes the sum of the absolute change in vertex PageRank scores
+	 * between iterations.
+	 *
+	 * @param <T> ID type
+	 */
+	@ForwardedFieldsFirst("0")
+	@ForwardedFieldsSecond("*")
+	private static class ChangeInScores<T>
+	extends RichJoinFunction<Tuple2<T, DoubleValue>, Tuple2<T, DoubleValue>, Tuple2<T, DoubleValue>> {
+		private double changeInScores;
+
+		@Override
+		public void open(Configuration parameters) throws Exception {
+			super.open(parameters);
+
+			changeInScores = 0.0;
+		}
+
+		@Override
+		public void close()
+				throws Exception {
+			super.close();
+
+			DoubleSumAggregator agg = getIterationRuntimeContext().getIterationAggregator(CHANGE_IN_SCORES);
+			agg.aggregate(changeInScores);
+		}
+
+		@Override
+		public Tuple2<T, DoubleValue> join(Tuple2<T, DoubleValue> first, Tuple2<T, DoubleValue> second)
+				throws Exception {
+			changeInScores += Math.abs(second.f1.getValue() - first.f1.getValue());
+			return second;
+		}
+	}
+
+	/**
+	 * Monitors the sum of the absolute change in vertex scores. The algorithm
+	 * terminates when the change in scores compared against the prior iteration
+	 * falls to or below the given convergence threshold.
+	 */
+	private static class ScoreConvergence
+	implements ConvergenceCriterion<DoubleValue> {
+		private double convergenceThreshold;
+
+		public ScoreConvergence(double convergenceThreshold) {
+			this.convergenceThreshold = convergenceThreshold;
+		}
+
+		@Override
+		public boolean isConverged(int iteration, DoubleValue value) {
+			double val = value.getValue();
+			return (val <= convergenceThreshold);
+		}
+	}
+
+	/**
+	 * Map the Tuple result to the return type.
+	 *
+	 * @param <T> ID type
+	 */
+	@ForwardedFields("0; 1")
+	private static class TranslateResult<T>
+		implements MapFunction<Tuple2<T, DoubleValue>, Result<T>> {
+		private Result<T> output = new Result<>();
+
+		@Override
+		public Result<T> map(Tuple2<T, DoubleValue> value) throws Exception {
+			output.f0 = value.f0;
+			output.f1 = value.f1;
+			return output;
+		}
+	}
+
+	/**
+	 * Wraps the {@link Tuple2} to encapsulate results from the PageRank algorithm.
+	 *
+	 * @param <T> ID type
+	 */
+	public static class Result<T>
+	extends Tuple2<T, DoubleValue>
+	implements PrintableResult, UnaryResult<T> {
+		public static final int HASH_SEED = 0x4010af29;
+
+		private MurmurHash hasher = new MurmurHash(HASH_SEED);
+
+		@Override
+		public T getVertexId0() {
+			return f0;
+		}
+
+		@Override
+		public void setVertexId0(T value) {
+			f0 = value;
+		}
+
+		/**
+		 * Get the PageRank score.
+		 *
+		 * @return the PageRank score
+		 */
+		public DoubleValue getPageRankScore() {
+			return f1;
+		}
+
+		@Override
+		public String toPrintableString() {
+			return "Vertex ID: " + getVertexId0()
+				+ ", PageRank score: " + getPageRankScore();
+		}
+
+		@Override
+		public int hashCode() {
+			return hasher.reset()
+				.hash(f0.hashCode())
+				.hash(f1.getValue())
+				.hash();
+		}
+	}
+}

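Again not part of the commit: a comparable driver sketch for PageRank, with invented data. Per the AdjustScores notes above, each emitted score is uniformlyDistributedScore + dampingFactor * score, where the uniform share folds together the random hop, the sink mass, and any floating point remainder:

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.library.linkanalysis.PageRank;
import org.apache.flink.types.NullValue;

public class PageRankExample {
	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		DataSet<Tuple2<Long, Long>> edges = env.fromElements(
			Tuple2.of(0L, 1L),
			Tuple2.of(1L, 2L),
			Tuple2.of(2L, 0L),
			Tuple2.of(2L, 1L));

		Graph<Long, NullValue, NullValue> graph =
			Graph.fromTuple2DataSet(edges, env);

		// Damping factor 0.85; iterate until the summed change in
		// scores falls to 1e-6 or below, capped at 20 iterations.
		DataSet<PageRank.Result<Long>> ranks = graph
			.run(new PageRank<Long, NullValue, NullValue>(0.85, 20, 1.0e-6));

		ranks.print();
	}
}
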
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/ChecksumHashCode.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/ChecksumHashCode.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/ChecksumHashCode.java
index d2eeb41..ff4ad48 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/ChecksumHashCode.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/ChecksumHashCode.java
@@ -18,9 +18,9 @@
 
 package org.apache.flink.graph.library.metric;
 
-import org.apache.flink.graph.AbstractGraphAnalytic;
 import org.apache.flink.graph.Edge;
 import org.apache.flink.graph.Graph;
+import org.apache.flink.graph.GraphAnalyticBase;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.graph.asm.dataset.ChecksumHashCode.Checksum;
 
@@ -35,7 +35,7 @@ import org.apache.flink.graph.asm.dataset.ChecksumHashCode.Checksum;
  * @param <EV> edge value type
  */
 public class ChecksumHashCode<K, VV, EV>
-extends AbstractGraphAnalytic<K, VV, EV, Checksum> {
+extends GraphAnalyticBase<K, VV, EV, Checksum> {
 
 	private org.apache.flink.graph.asm.dataset.ChecksumHashCode<Vertex<K, VV>> vertexChecksum;
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/directed/EdgeMetrics.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/directed/EdgeMetrics.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/directed/EdgeMetrics.java
index 82cc607..30d3563 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/directed/EdgeMetrics.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/directed/EdgeMetrics.java
@@ -18,8 +18,6 @@
 
 package org.apache.flink.graph.library.metric.directed;
 
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.flink.api.common.accumulators.LongCounter;
 import org.apache.flink.api.common.accumulators.LongMaximum;
 import org.apache.flink.api.common.functions.FlatMapFunction;
@@ -31,10 +29,10 @@ import org.apache.flink.api.java.functions.FunctionAnnotation.ForwardedFields;
 import org.apache.flink.api.java.tuple.Tuple3;
 import org.apache.flink.api.java.tuple.Tuple4;
 import org.apache.flink.configuration.Configuration;
-import org.apache.flink.graph.AbstractGraphAnalytic;
 import org.apache.flink.graph.AnalyticHelper;
 import org.apache.flink.graph.Edge;
 import org.apache.flink.graph.Graph;
+import org.apache.flink.graph.GraphAnalyticBase;
 import org.apache.flink.graph.asm.degree.annotate.directed.EdgeDegreesPair;
 import org.apache.flink.graph.asm.degree.annotate.directed.VertexDegrees.Degrees;
 import org.apache.flink.graph.asm.result.PrintableResult;
@@ -42,13 +40,16 @@ import org.apache.flink.graph.library.metric.directed.EdgeMetrics.Result;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.util.Collector;
 
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+
 import java.io.IOException;
 import java.text.NumberFormat;
 
 import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
 
 /**
- * Compute the following edge metrics in a directed graph:
+ * Compute the following edge metrics in a directed graph.
  *  - number of triangle triplets
  *  - number of rectangle triplets
  *  - maximum number of triangle triplets
@@ -59,7 +60,7 @@ import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
  * @param <EV> edge value type
  */
 public class EdgeMetrics<K extends Comparable<K>, VV, EV>
-extends AbstractGraphAnalytic<K, VV, EV, Result> {
+extends GraphAnalyticBase<K, VV, EV, Result> {
 
 	private static final String TRIANGLE_TRIPLET_COUNT = "triangleTripletCount";
 
@@ -88,11 +89,11 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 	/*
 	 * Implementation notes:
 	 *
-	 * Use aggregator to replace SumEdgeStats when aggregators are rewritten to use
-	 *   a hash-combineable hashable-reduce.
+	 * <p>Use aggregator to replace SumEdgeStats when aggregators are rewritten to use
+	 * a hash-combineable hashable-reduce.
 	 *
-	 * Use distinct to replace ReduceEdgeStats when the combiner can be disabled
-	 *   with a sorted-reduce forced.
+	 * <p>Use distinct to replace ReduceEdgeStats when the combiner can be disabled
+	 * with a sorted-reduce forced.
 	 */
 
 	@Override
@@ -147,7 +148,7 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 	 * the target vertex ID, the source degrees, and the low-order count. The
 	 * second tuple is the same with the source and target roles reversed.
 	 *
-	 * The low-order count is one if the source vertex degree is less than the
+	 * <p>The low-order count is one if the source vertex degree is less than the
 	 * target vertex degree or if the degrees are equal and the source vertex
 	 * ID compares lower than the target vertex ID; otherwise the low-order
 	 * count is zero.
@@ -346,11 +347,19 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 
 		@Override
 		public boolean equals(Object obj) {
-			if (obj == null) { return false; }
-			if (obj == this) { return true; }
-			if (obj.getClass() != getClass()) { return false; }
+			if (obj == null) {
+				return false;
+			}
+
+			if (obj == this) {
+				return true;
+			}
+
+			if (obj.getClass() != getClass()) {
+				return false;
+			}
 
-			Result rhs = (Result)obj;
+			Result rhs = (Result) obj;
 
 			return new EqualsBuilder()
 				.append(triangleTripletCount, rhs.triangleTripletCount)

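The "low-order count" rule documented in the hunk above reduces to a small total order on (degree, vertex ID) pairs. A standalone restatement, not part of the commit, with invented names and sample values:

public final class LowOrderCount {

	private LowOrderCount() {}

	/**
	 * Returns one if the source's (degree, ID) pair orders strictly
	 * below the target's, zero otherwise, matching the rule in the
	 * EdgeMetrics Javadoc.
	 */
	public static <K extends Comparable<K>> int lowOrderCount(
			K sourceId, long sourceDegree, K targetId, long targetDegree) {
		if (sourceDegree < targetDegree) {
			return 1;
		}

		if (sourceDegree == targetDegree && sourceId.compareTo(targetId) < 0) {
			return 1;
		}

		return 0;
	}

	public static void main(String[] args) {
		System.out.println(lowOrderCount("a", 2L, "b", 3L)); // 1: lower degree
		System.out.println(lowOrderCount("a", 3L, "b", 3L)); // 1: equal degree, lower ID
		System.out.println(lowOrderCount("b", 3L, "a", 3L)); // 0
	}
}
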
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/directed/VertexMetrics.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/directed/VertexMetrics.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/directed/VertexMetrics.java
index 9764f6b..3931f65 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/directed/VertexMetrics.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/directed/VertexMetrics.java
@@ -18,27 +18,28 @@
 
 package org.apache.flink.graph.library.metric.directed;
 
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.flink.api.common.accumulators.LongCounter;
 import org.apache.flink.api.common.accumulators.LongMaximum;
 import org.apache.flink.api.java.DataSet;
-import org.apache.flink.graph.AbstractGraphAnalytic;
 import org.apache.flink.graph.AnalyticHelper;
 import org.apache.flink.graph.Graph;
+import org.apache.flink.graph.GraphAnalyticBase;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.graph.asm.degree.annotate.directed.VertexDegrees;
 import org.apache.flink.graph.asm.degree.annotate.directed.VertexDegrees.Degrees;
 import org.apache.flink.graph.asm.result.PrintableResult;
 import org.apache.flink.graph.library.metric.directed.VertexMetrics.Result;
 
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+
 import java.io.IOException;
 import java.text.NumberFormat;
 
 import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
 
 /**
- * Compute the following vertex metrics in a directed graph:
+ * Compute the following vertex metrics in a directed graph.
  *  - number of vertices
  *  - number of edges
  *  - number of unidirectional edges
@@ -55,7 +56,7 @@ import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
  * @param <EV> edge value type
  */
 public class VertexMetrics<K extends Comparable<K>, VV, EV>
-extends AbstractGraphAnalytic<K, VV, EV, Result> {
+extends GraphAnalyticBase<K, VV, EV, Result> {
 
 	private static final String VERTEX_COUNT = "vertexCount";
 
@@ -255,25 +256,25 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 		/**
 		 * Get the average degree, the average number of in- plus out-edges per vertex.
 		 *
-		 * A result of {@code Float.NaN} is returned for an empty graph for
+		 * <p>A result of {@code Float.NaN} is returned for an empty graph for
 		 * which both the number of edges and number of vertices is zero.
 		 *
 		 * @return average degree
 		 */
 		public double getAverageDegree() {
-			return vertexCount == 0 ? Double.NaN : getNumberOfEdges() / (double)vertexCount;
+			return vertexCount == 0 ? Double.NaN : getNumberOfEdges() / (double) vertexCount;
 		}
 
 		/**
 		 * Get the density, the ratio of actual to potential edges between vertices.
 		 *
-		 * A result of {@code Float.NaN} is returned for a graph with fewer than
+		 * <p>A result of {@code Float.NaN} is returned for a graph with fewer than
 		 * two vertices for which the number of edges is zero.
 		 *
 		 * @return density
 		 */
 		public double getDensity() {
-			return vertexCount <= 1 ? Double.NaN : getNumberOfEdges() / (double)(vertexCount*(vertexCount-1));
+			return vertexCount <= 1 ? Double.NaN : getNumberOfEdges() / (double) (vertexCount * (vertexCount - 1));
 		}
 
 		/**
@@ -358,11 +359,19 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 
 		@Override
 		public boolean equals(Object obj) {
-			if (obj == null) { return false; }
-			if (obj == this) { return true; }
-			if (obj.getClass() != getClass()) { return false; }
+			if (obj == null) {
+				return false;
+			}
+
+			if (obj == this) {
+				return true;
+			}
+
+			if (obj.getClass() != getClass()) {
+				return false;
+			}
 
-			Result rhs = (Result)obj;
+			Result rhs = (Result) obj;
 
 			return new EqualsBuilder()
 				.append(vertexCount, rhs.vertexCount)

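Inlining the two accessors above with invented counts makes the directed formulas easy to check (getNumberOfEdges() is not visible in this hunk, so its value is simply assumed here):

public class DirectedVertexMetricsArithmetic {
	public static void main(String[] args) {
		long vertexCount = 4;
		long numberOfEdges = 6; // whatever getNumberOfEdges() reports

		double averageDegree = vertexCount == 0
			? Double.NaN
			: numberOfEdges / (double) vertexCount;

		// A directed graph on V vertices can hold V * (V - 1) edges.
		double density = vertexCount <= 1
			? Double.NaN
			: numberOfEdges / (double) (vertexCount * (vertexCount - 1));

		System.out.println(averageDegree); // 1.5
		System.out.println(density);       // 0.5
	}
}
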
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/undirected/EdgeMetrics.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/undirected/EdgeMetrics.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/undirected/EdgeMetrics.java
index 31f01d8..d1c7fb7 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/undirected/EdgeMetrics.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/undirected/EdgeMetrics.java
@@ -18,8 +18,6 @@
 
 package org.apache.flink.graph.library.metric.undirected;
 
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.flink.api.common.accumulators.LongCounter;
 import org.apache.flink.api.common.accumulators.LongMaximum;
 import org.apache.flink.api.common.functions.MapFunction;
@@ -28,22 +26,25 @@ import org.apache.flink.api.common.operators.base.ReduceOperatorBase.CombineHint
 import org.apache.flink.api.java.DataSet;
 import org.apache.flink.api.java.functions.FunctionAnnotation;
 import org.apache.flink.api.java.tuple.Tuple3;
-import org.apache.flink.graph.AbstractGraphAnalytic;
 import org.apache.flink.graph.AnalyticHelper;
 import org.apache.flink.graph.Edge;
 import org.apache.flink.graph.Graph;
+import org.apache.flink.graph.GraphAnalyticBase;
 import org.apache.flink.graph.asm.degree.annotate.undirected.EdgeDegreePair;
 import org.apache.flink.graph.asm.result.PrintableResult;
 import org.apache.flink.graph.library.metric.undirected.EdgeMetrics.Result;
 import org.apache.flink.types.LongValue;
 
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+
 import java.io.IOException;
 import java.text.NumberFormat;
 
 import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
 
 /**
- * Compute the following edge metrics in an undirected graph:
+ * Compute the following edge metrics in an undirected graph.
  *  - number of triangle triplets
  *  - number of rectangle triplets
  *  - maximum number of triangle triplets
@@ -54,7 +55,7 @@ import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
  * @param <EV> edge value type
  */
 public class EdgeMetrics<K extends Comparable<K>, VV, EV>
-extends AbstractGraphAnalytic<K, VV, EV, Result> {
+extends GraphAnalyticBase<K, VV, EV, Result> {
 
 	private static final String TRIANGLE_TRIPLET_COUNT = "triangleTripletCount";
 
@@ -101,8 +102,8 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 	/*
 	 * Implementation notes:
 	 *
-	 * Use aggregator to replace SumEdgeStats when aggregators are rewritten to use
-	 *   a hash-combineable hashed-reduce.
+	 * <p>Use aggregator to replace SumEdgeStats when aggregators are rewritten to use
+	 * a hash-combineable hashed-reduce.
 	 */
 
 	@Override
@@ -319,11 +320,19 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 
 		@Override
 		public boolean equals(Object obj) {
-			if (obj == null) { return false; }
-			if (obj == this) { return true; }
-			if (obj.getClass() != getClass()) { return false; }
+			if (obj == null) {
+				return false;
+			}
+
+			if (obj == this) {
+				return true;
+			}
+
+			if (obj.getClass() != getClass()) {
+				return false;
+			}
 
-			Result rhs = (Result)obj;
+			Result rhs = (Result) obj;
 
 			return new EqualsBuilder()
 				.append(triangleTripletCount, rhs.triangleTripletCount)

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/undirected/VertexMetrics.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/undirected/VertexMetrics.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/undirected/VertexMetrics.java
index dd2411e..000f4e0 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/undirected/VertexMetrics.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/metric/undirected/VertexMetrics.java
@@ -18,27 +18,28 @@
 
 package org.apache.flink.graph.library.metric.undirected;
 
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.flink.api.common.accumulators.LongCounter;
 import org.apache.flink.api.common.accumulators.LongMaximum;
 import org.apache.flink.api.java.DataSet;
-import org.apache.flink.graph.AbstractGraphAnalytic;
 import org.apache.flink.graph.AnalyticHelper;
 import org.apache.flink.graph.Graph;
+import org.apache.flink.graph.GraphAnalyticBase;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.graph.asm.degree.annotate.undirected.VertexDegree;
 import org.apache.flink.graph.asm.result.PrintableResult;
 import org.apache.flink.graph.library.metric.undirected.VertexMetrics.Result;
 import org.apache.flink.types.LongValue;
 
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+
 import java.io.IOException;
 import java.text.NumberFormat;
 
 import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
 
 /**
- * Compute the following vertex metrics in an undirected graph:
+ * Compute the following vertex metrics in an undirected graph.
  *  - number of vertices
  *  - number of edges
  *  - average degree
@@ -51,7 +52,7 @@ import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
  * @param <EV> edge value type
  */
 public class VertexMetrics<K extends Comparable<K>, VV, EV>
-extends AbstractGraphAnalytic<K, VV, EV, Result> {
+extends GraphAnalyticBase<K, VV, EV, Result> {
 
 	private static final String VERTEX_COUNT = "vertexCount";
 
@@ -222,26 +223,26 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 		/**
 		 * Get the average degree, the average number of edges per vertex.
 		 *
-		 * A result of {@code Float.NaN} is returned for an empty graph for
+		 * <p>A result of {@code Float.NaN} is returned for an empty graph for
 		 * which both the number of edges and number of vertices is zero.
 		 *
 		 * @return average degree
 		 */
 		public double getAverageDegree() {
 			// each edge is incident on two vertices
-			return vertexCount == 0 ? Double.NaN : 2 * edgeCount / (double)vertexCount;
+			return vertexCount == 0 ? Double.NaN : 2 * edgeCount / (double) vertexCount;
 		}
 
 		/**
 		 * Get the density, the ratio of actual to potential edges between vertices.
 		 *
-		 * A result of {@code Float.NaN} is returned for a graph with fewer than
+		 * <p>A result of {@code Float.NaN} is returned for a graph with fewer than
 		 * two vertices for which the number of edges is zero.
 		 *
 		 * @return density
 		 */
 		public double getDensity() {
-			return vertexCount <= 1 ? Double.NaN : edgeCount / (double)(vertexCount*(vertexCount-1)/2);
+			return vertexCount <= 1 ? Double.NaN : edgeCount / (double) (vertexCount * (vertexCount - 1) / 2);
 		}
 
 		/**
@@ -301,11 +302,19 @@ extends AbstractGraphAnalytic<K, VV, EV, Result> {
 
 		@Override
 		public boolean equals(Object obj) {
-			if (obj == null) { return false; }
-			if (obj == this) { return true; }
-			if (obj.getClass() != getClass()) { return false; }
+			if (obj == null) {
+				return false;
+			}
+
+			if (obj == this) {
+				return true;
+			}
+
+			if (obj.getClass() != getClass()) {
+				return false;
+			}
 
-			Result rhs = (Result)obj;
+			Result rhs = (Result) obj;
 
 			return new EqualsBuilder()
 				.append(vertexCount, rhs.vertexCount)
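
To make the two guarded formulas above concrete, a worked check with assumed example values (the complete graph on four vertices, K4; this snippet is illustrative and not part of the commit):

public class VertexMetricsFormulas {
    public static void main(String[] args) {
        long vertexCount = 4;  // K4, assumed example
        long edgeCount = 6;    // K4 has n * (n - 1) / 2 = 6 undirected edges

        // each edge is incident on two vertices, so the degree sum is 2 * edgeCount
        double averageDegree = vertexCount == 0 ? Double.NaN : 2 * edgeCount / (double) vertexCount;

        // actual edges divided by the n * (n - 1) / 2 potential edges
        double density = vertexCount <= 1 ? Double.NaN : edgeCount / (double) (vertexCount * (vertexCount - 1) / 2);

        System.out.println(averageDegree + " " + density);  // prints 3.0 1.0
    }
}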

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/similarity/AdamicAdar.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/similarity/AdamicAdar.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/similarity/AdamicAdar.java
index 7df3235..e1bda93 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/similarity/AdamicAdar.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/similarity/AdamicAdar.java
@@ -37,7 +37,7 @@ import org.apache.flink.graph.asm.degree.annotate.undirected.VertexDegree;
 import org.apache.flink.graph.asm.result.BinaryResult;
 import org.apache.flink.graph.asm.result.PrintableResult;
 import org.apache.flink.graph.library.similarity.AdamicAdar.Result;
-import org.apache.flink.graph.utils.Murmur3_32;
+import org.apache.flink.graph.utils.MurmurHash;
 import org.apache.flink.graph.utils.proxy.GraphAlgorithmWrappingDataSet;
 import org.apache.flink.types.CopyableValue;
 import org.apache.flink.types.FloatValue;
@@ -54,17 +54,17 @@ import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
 
 /**
  * http://social.cs.uiuc.edu/class/cs591kgk/friendsadamic.pdf
- * <p>
- * Adamic-Adar measures the similarity between pairs of vertices as the sum of
+ *
+ * <p>Adamic-Adar measures the similarity between pairs of vertices as the sum of
  * the inverse logarithm of degree over shared neighbors. Scores are non-negative
  * and unbounded. A vertex with higher degree has greater overall influence but
  * is less influential to each pair of neighbors.
- * <p>
- * This implementation produces similarity scores for each pair of vertices
+ *
+ * <p>This implementation produces similarity scores for each pair of vertices
  * in the graph with at least one shared neighbor; equivalently, this is the
  * set of all non-zero Adamic-Adar coefficients.
- * <p>
- * The input graph must be a simple, undirected graph containing no duplicate
+ *
+ * <p>The input graph must be a simple, undirected graph containing no duplicate
  * edges or self-loops.
  *
  * @param <K> graph ID type
@@ -137,7 +137,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingDataSet other) {
 		Preconditions.checkNotNull(other);
 
-		if (! AdamicAdar.class.isAssignableFrom(other.getClass())) {
+		if (!AdamicAdar.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 
@@ -254,7 +254,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 
 			long degree = value.f1.getValue();
 			// when the degree is one the logarithm is zero so avoid dividing by this value
-			float inverseLogDegree = (degree == 1) ? 0.0f : 1.0f / (float)Math.log(value.f1.getValue());
+			float inverseLogDegree = (degree == 1) ? 0.0f : 1.0f / (float) Math.log(value.f1.getValue());
 			output.f2.setValue(inverseLogDegree);
 
 			return output;
@@ -266,7 +266,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 	 *
 	 * @param <T> ID type
 	 */
-	@ForwardedFields("0->1; 1->2 ; 2->3")
+	@ForwardedFields("0->1; 1->2; 2->3")
 	private static class GenerateGroupSpans<T>
 	implements GroupReduceFunction<Tuple3<T, T, FloatValue>, Tuple4<IntValue, T, T, FloatValue>> {
 		private IntValue groupSpansValue = new IntValue();
@@ -309,7 +309,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 				throws Exception {
 			int spans = value.f0.getValue();
 
-			for (int idx = 0 ; idx < spans ; idx++ ) {
+			for (int idx = 0; idx < spans; idx++) {
 				value.f0.setValue(idx);
 				out.collect(value);
 			}
@@ -339,16 +339,16 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 				output.f1 = edge.f2;
 				output.f2 = edge.f3;
 
-				for (int i = 0 ; i < visitedCount ; i++) {
+				for (int i = 0; i < visitedCount; i++) {
 					output.f0 = visited.get(i);
 					out.collect(output);
 				}
 
 				if (visitedCount < GROUP_SIZE) {
-					if (! initialized) {
+					if (!initialized) {
 						initialized = true;
 
-						for (int i = 0 ; i < GROUP_SIZE ; i++) {
+						for (int i = 0; i < GROUP_SIZE; i++) {
 							visited.add(edge.f2.copy());
 						}
 					} else {
@@ -451,7 +451,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 	implements PrintableResult, BinaryResult<T>, Comparable<Result<T>> {
 		public static final int HASH_SEED = 0xe405f6d1;
 
-		private Murmur3_32 hasher = new Murmur3_32(HASH_SEED);
+		private MurmurHash hasher = new MurmurHash(HASH_SEED);
 
 		/**
 		 * No-args constructor.
@@ -482,7 +482,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 
 		/**
 		 * Get the Adamic-Adar score, equal to the sum over common neighbors of
-		 * the inverse logarithm of degree
+		 * the inverse logarithm of degree.
 		 *
 		 * @return Adamic-Adar score
 		 */
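
Spelled out, the score the operators in this diff assemble is AA(u, v) = sum over shared neighbors w of 1 / ln(degree(w)). A sequential sketch of that sum, with illustrative (non-Flink) inputs:

import java.util.Map;
import java.util.Set;

public final class AdamicAdarReference {
    // sequential reference for the distributed computation above
    static float adamicAdar(Set<Long> neighborsOfU, Set<Long> neighborsOfV, Map<Long, Integer> degree) {
        float score = 0.0f;
        for (Long w : neighborsOfU) {
            if (neighborsOfV.contains(w)) {
                int d = degree.get(w);
                // same guard as the inverse-log-degree mapper above: ln(1) == 0,
                // so unit-degree neighbors contribute nothing
                score += (d == 1) ? 0.0f : 1.0f / (float) Math.log(d);
            }
        }
        return score;
    }
}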

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/similarity/JaccardIndex.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/similarity/JaccardIndex.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/similarity/JaccardIndex.java
index b3e69f1..35217c6 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/similarity/JaccardIndex.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/library/similarity/JaccardIndex.java
@@ -32,7 +32,7 @@ import org.apache.flink.graph.asm.degree.annotate.undirected.EdgeTargetDegree;
 import org.apache.flink.graph.asm.result.BinaryResult;
 import org.apache.flink.graph.asm.result.PrintableResult;
 import org.apache.flink.graph.library.similarity.JaccardIndex.Result;
-import org.apache.flink.graph.utils.Murmur3_32;
+import org.apache.flink.graph.utils.MurmurHash;
 import org.apache.flink.graph.utils.proxy.GraphAlgorithmWrappingDataSet;
 import org.apache.flink.types.CopyableValue;
 import org.apache.flink.types.IntValue;
@@ -50,12 +50,12 @@ import static org.apache.flink.api.common.ExecutionConfig.PARALLELISM_DEFAULT;
  * is computed as the number of shared neighbors divided by the number of
  * distinct neighbors. Scores range from 0.0 (no shared neighbors) to 1.0 (all
  * neighbors are shared).
- * <p>
- * This implementation produces similarity scores for each pair of vertices
+ *
+ * <p>This implementation produces similarity scores for each pair of vertices
  * in the graph with at least one shared neighbor; equivalently, this is the
  * set of all non-zero Jaccard Similarity coefficients.
- * <p>
- * The input graph must be a simple, undirected graph containing no duplicate
+ *
+ * <p>The input graph must be a simple, undirected graph containing no duplicate
  * edges or self-loops.
  *
  * @param <K> graph ID type
@@ -89,7 +89,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 	 * pairs. Small groups generate more data whereas large groups distribute
 	 * computation less evenly among tasks.
 	 *
-	 * The default value should be near-optimal for all use cases.
+	 * <p>The default value should be near-optimal for all use cases.
 	 *
 	 * @param groupSize the group size for the quadratic expansion of neighbor pairs
 	 * @return this
@@ -180,7 +180,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 	protected boolean mergeConfiguration(GraphAlgorithmWrappingDataSet other) {
 		Preconditions.checkNotNull(other);
 
-		if (! JaccardIndex.class.isAssignableFrom(other.getClass())) {
+		if (!JaccardIndex.class.isAssignableFrom(other.getClass())) {
 			return false;
 		}
 
@@ -267,11 +267,11 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 	 * This is the first of three operations implementing a self-join to generate
 	 * the full neighbor pairing for each vertex. The number of neighbor pairs
 	 * is (n choose 2) which is quadratic in the vertex degree.
-	 * <p>
-	 * The third operation, {@link GenerateGroupPairs}, processes groups of size
+	 *
+	 * <p>The third operation, {@link GenerateGroupPairs}, processes groups of size
 	 * {@link #groupSize} and emits {@code O(groupSize * deg(vertex))} pairs.
-	 * <p>
-	 * This input to the third operation is still quadratic in the vertex degree.
+	 *
+	 * <p>This input to the third operation is still quadratic in the vertex degree.
 	 * Two prior operations, {@link GenerateGroupSpans} and {@link GenerateGroups},
 	 * each emit datasets linear in the vertex degree, with a forced rebalance
 	 * in between. {@link GenerateGroupSpans} first annotates each edge with the
@@ -310,7 +310,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 				// group span, u, v, d(v)
 				output.f1 = edge.f0;
 				output.f2 = edge.f1;
-				output.f3.setValue((int)degree);
+				output.f3.setValue((int) degree);
 
 				out.collect(output);
 
@@ -337,7 +337,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 				throws Exception {
 			int spans = value.f0.getValue();
 
-			for (int idx = 0 ; idx < spans ; idx++ ) {
+			for (int idx = 0; idx < spans; idx++) {
 				value.f0.setValue(idx);
 				out.collect(value);
 			}
@@ -346,8 +346,8 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 
 	/**
 	 * Emits the two-path for all neighbor pairs in this group.
-	 * <p>
-	 * The first {@link #groupSize} vertices are emitted pairwise. Following
+	 *
+	 * <p>The first {@link #groupSize} vertices are emitted pairwise. Following
 	 * vertices are only paired with vertices from this initial group.
 	 *
 	 * @param <T> ID type
@@ -373,7 +373,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 			int visitedCount = 0;
 
 			for (Tuple4<IntValue, T, T, IntValue> edge : values) {
-				for (int i = 0 ; i < visitedCount ; i++) {
+				for (int i = 0; i < visitedCount; i++) {
 					Tuple3<T, T, IntValue> prior = visited.get(i);
 
 					prior.f1 = edge.f2;
@@ -384,7 +384,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 					if (degreeSum > Integer.MAX_VALUE) {
 						throw new RuntimeException("Degree sum overflows IntValue");
 					}
-					prior.f2.setValue((int)degreeSum);
+					prior.f2.setValue((int) degreeSum);
 
 					// v, w, d(v) + d(w)
 					out.collect(prior);
@@ -393,10 +393,10 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 				}
 
 				if (visitedCount < groupSize) {
-					if (! initialized) {
+					if (!initialized) {
 						initialized = true;
 
-						for (int i = 0 ; i < groupSize ; i++) {
+						for (int i = 0; i < groupSize; i++) {
 							Tuple3<T, T, IntValue> tuple = new Tuple3<>();
 
 							tuple.f0 = edge.f2.copy();
@@ -506,7 +506,7 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 	implements PrintableResult, BinaryResult<T>, Comparable<Result<T>> {
 		public static final int HASH_SEED = 0x731f73e7;
 
-		private Murmur3_32 hasher = new Murmur3_32(HASH_SEED);
+		private MurmurHash hasher = new MurmurHash(HASH_SEED);
 
 		public Result() {
 			f2 = new IntValue();
@@ -584,8 +584,8 @@ extends GraphAlgorithmWrappingDataSet<K, VV, EV, Result<K>> {
 			// exact comparison of a/b with x/y using only integer math:
 			// a/b <?> x/y == a*y <?> b*x
 
-			long ay = getSharedNeighborCount().getValue() * (long)o.getDistinctNeighborCount().getValue();
-			long bx = getDistinctNeighborCount().getValue() * (long)o.getSharedNeighborCount().getValue();
+			long ay = getSharedNeighborCount().getValue() * (long) o.getDistinctNeighborCount().getValue();
+			long bx = getDistinctNeighborCount().getValue() * (long) o.getSharedNeighborCount().getValue();
 
 			return Long.compare(ay, bx);
 		}
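
The compareTo hunk above deserves a note: it orders two scores a/b and x/y exactly by cross-multiplying in long arithmetic rather than dividing, so floating-point rounding can never reorder close scores. In isolation (a standalone sketch, not part of the commit):

public final class FractionCompare {
    // a/b <?> x/y  ==  a*y <?> b*x, for positive denominators
    static int compareFractions(int a, int b, int x, int y) {
        long ay = a * (long) y;  // widen before multiplying to avoid int overflow
        long bx = b * (long) x;
        return Long.compare(ay, bx);
    }
    // e.g. compareFractions(1, 3, 2, 6) == 0, since 1/3 and 2/6 are equal scores
}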

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/ComputeFunction.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/ComputeFunction.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/ComputeFunction.java
index af25377..1f2d832 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/ComputeFunction.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/ComputeFunction.java
@@ -35,7 +35,7 @@ import java.util.Iterator;
 
 /**
  * The base class for the message-passing functions between vertices as a part of a {@link VertexCentricIteration}.
- * 
+ *
  * @param <K> The type of the vertex key (the vertex identifier).
  * @param <VV> The type of the vertex value (the state of the vertex).
  * @param <EV> The type of the values that are associated with the edges.
@@ -53,10 +53,10 @@ public abstract class ComputeFunction<K, VV, EV, Message> implements Serializabl
 	 * This method is invoked once per superstep, for each active vertex.
 	 * A vertex is active during a superstep, if at least one message was produced for it,
 	 * in the previous superstep. During the first superstep, all vertices are active.
-	 * <p>
-	 * This method can iterate over all received messages, set the new vertex value, and
+	 *
+	 * <p>This method can iterate over all received messages, set the new vertex value, and
 	 * send messages to other vertices (which will be delivered in the next superstep).
-	 * 
+	 *
 	 * @param vertex The vertex executing this function
 	 * @param messages The messages that were sent to this vertex in the previous superstep
 	 * @throws Exception
@@ -65,23 +65,22 @@ public abstract class ComputeFunction<K, VV, EV, Message> implements Serializabl
 
 	/**
 	 * This method is executed once per superstep before the vertex update function is invoked for each vertex.
-	 * 
+	 *
 	 * @throws Exception Exceptions in the pre-superstep phase cause the superstep to fail.
 	 */
 	public void preSuperstep() throws Exception {}
-	
+
 	/**
 	 * This method is executed once per superstep after the vertex update function has been invoked for each vertex.
-	 * 
+	 *
 	 * @throws Exception Exceptions in the post-superstep phase cause the superstep to fail.
 	 */
 	public void postSuperstep() throws Exception {}
-	
-	
+
 	/**
 	 * Gets an {@link java.lang.Iterable} with all out-going edges. This method is mutually exclusive with
 	 * {@link #sendMessageToAllNeighbors(Object)} and may be called only once.
-	 * 
+	 *
 	 * @return An iterator with all edges.
 	 */
 	public final Iterable<Edge<K, EV>> getEdges() {
@@ -93,7 +92,7 @@ public abstract class ComputeFunction<K, VV, EV, Message> implements Serializabl
 	/**
 	 * Sends the given message to all vertices that adjacent to the changed vertex.
 	 * This method is mutually exclusive to the method {@link #getEdges()} and may be called only once.
-	 * 
+	 *
 	 * @param m The message to send.
 	 */
 	public final void sendMessageToAllNeighbors(Message m) {
@@ -105,11 +104,11 @@ public abstract class ComputeFunction<K, VV, EV, Message> implements Serializabl
 			out.collect(Either.Right(outMsg));
 		}
 	}
-	
+
 	/**
 	 * Sends the given message to the vertex identified by the given key. If the target vertex does not exist,
 	 * the next superstep will cause an exception due to a non-deliverable message.
-	 * 
+	 *
 	 * @param target The key (id) of the target vertex to message.
 	 * @param m The message.
 	 */
@@ -124,12 +123,12 @@ public abstract class ComputeFunction<K, VV, EV, Message> implements Serializabl
 	/**
 	 * Sets the new value of this vertex.
 	 *
-	 * This should be called at most once per ComputeFunction.
-	 * 
+	 * <p>This should be called at most once per ComputeFunction.
+	 *
 	 * @param newValue The new vertex value.
 	 */
 	public final void setNewVertexValue(VV newValue) {
-		if(setNewVertexValueCalled) {
+		if (setNewVertexValueCalled) {
 			throw new IllegalStateException("setNewVertexValue should only be called at most once per updateVertex");
 		}
 		setNewVertexValueCalled = true;
@@ -138,43 +137,44 @@ public abstract class ComputeFunction<K, VV, EV, Message> implements Serializabl
 
 		out.collect(Either.Left(outVertex));
 	}
+
 	// --------------------------------------------------------------------------------------------
-	
+
 	/**
 	 * Gets the number of the superstep, starting at <tt>1</tt>.
-	 * 
+	 *
 	 * @return The number of the current superstep.
 	 */
 	public final int getSuperstepNumber() {
 		return this.runtimeContext.getSuperstepNumber();
 	}
-	
+
 	/**
 	 * Gets the iteration aggregator registered under the given name. The iteration aggregator combines
 	 * all aggregates globally once per superstep and makes them available in the next superstep.
-	 * 
+	 *
 	 * @param name The name of the aggregator.
 	 * @return The aggregator registered under this name, or {@code null}, if no aggregator was registered.
 	 */
 	public final <T extends Aggregator<?>> T getIterationAggregator(String name) {
 		return this.runtimeContext.getIterationAggregator(name);
 	}
-	
+
 	/**
 	 * Get the aggregated value that an aggregator computed in the previous iteration.
-	 * 
+	 *
 	 * @param name The name of the aggregator.
 	 * @return The aggregated value of the previous iteration.
 	 */
 	public final <T extends Value> T getPreviousIterationAggregate(String name) {
 		return this.runtimeContext.getPreviousIterationAggregate(name);
 	}
-	
+
 	/**
 	 * Gets the broadcast data set registered under the given name. Broadcast data sets
 	 * are available on all parallel instances of a function. They can be registered via
 	 * {@link org.apache.flink.graph.pregel.VertexCentricConfiguration#addBroadcastSet(String, DataSet)}.
-	 * 
+	 *
 	 * @param name The name under which the broadcast set is registered.
 	 * @return The broadcast data set.
 	 */
@@ -189,26 +189,26 @@ public abstract class ComputeFunction<K, VV, EV, Message> implements Serializabl
 	private Vertex<K, VV> outVertex;
 
 	private Tuple2<K, Message> outMsg;
-	
+
 	private IterationRuntimeContext runtimeContext;
-	
+
 	private Iterator<Edge<K, EV>> edges;
-	
+
 	private Collector<Either<?, ?>> out;
-	
+
 	private EdgesIterator<K, EV> edgeIterator;
-	
+
 	private boolean edgesUsed;
 
 	private boolean setNewVertexValueCalled;
-	
+
 	void init(IterationRuntimeContext context) {
 		this.runtimeContext = context;
 		this.outVertex = new Vertex<>();
 		this.outMsg = new Tuple2<>();
 		this.edgeIterator = new EdgesIterator<>();
 	}
-	
+
 	@SuppressWarnings("unchecked")
 	void set(K vertexId, Iterator<Edge<K, EV>> edges,
 			Collector<Either<Vertex<K, VV>, Tuple2<K, Message>>> out) {
@@ -228,17 +228,17 @@ public abstract class ComputeFunction<K, VV, EV, Message> implements Serializabl
 		edgesUsed = true;
 	}
 
-	private static final class EdgesIterator<K, EV> 
-		implements Iterator<Edge<K, EV>>, Iterable<Edge<K, EV>>
-	{
+	private static final class EdgesIterator<K, EV>
+		implements Iterator<Edge<K, EV>>, Iterable<Edge<K, EV>> {
+
 		private Iterator<Edge<K, EV>> input;
-		
+
 		private Edge<K, EV> edge = new Edge<>();
 
 		void set(Iterator<Edge<K, EV>> input) {
 			this.input = input;
 		}
-		
+
 		@Override
 		public boolean hasNext() {
 			return input.hasNext();
@@ -257,6 +257,7 @@ public abstract class ComputeFunction<K, VV, EV, Message> implements Serializabl
 		public void remove() {
 			throw new UnsupportedOperationException();
 		}
+
 		@Override
 		public Iterator<Edge<K, EV>> iterator() {
 			return this;
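
As a companion to the cleaned-up Javadoc above, a minimal ComputeFunction sketch. It assumes the abstract compute(vertex, messages) hook this class declares; the algorithm, min-id propagation for connected components, is illustrative:

import org.apache.flink.graph.Vertex;
import org.apache.flink.graph.pregel.ComputeFunction;
import org.apache.flink.graph.pregel.MessageIterator;
import org.apache.flink.types.NullValue;

public final class MinIdCompute extends ComputeFunction<Long, Long, NullValue, Long> {

    @Override
    public void compute(Vertex<Long, Long> vertex, MessageIterator<Long> messages) throws Exception {
        long minId = vertex.getValue();
        for (Long msg : messages) {
            minId = Math.min(minId, msg);
        }

        // every vertex announces itself in the first superstep; afterwards,
        // update and re-message neighbors only when the component id shrinks
        if (getSuperstepNumber() == 1 || minId < vertex.getValue()) {
            setNewVertexValue(minId);
            sendMessageToAllNeighbors(minId);
        }
    }
}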

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/MessageCombiner.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/MessageCombiner.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/MessageCombiner.java
index 6e51a3a..204bc9a 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/MessageCombiner.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/MessageCombiner.java
@@ -27,7 +27,7 @@ import java.io.Serializable;
 
 /**
  * The base class for combining messages sent during a {@link VertexCentricIteration}.
- * 
+ *
  * @param <K> The type of the vertex id
  * @param <Message> The type of the message sent between vertices along the edges.
  */
@@ -49,7 +49,7 @@ public abstract class MessageCombiner<K, Message> implements Serializable {
 	 * Combines messages sent from different vertices to a target vertex.
 	 * Implementing this method might reduce communication costs during a vertex-centric
 	 * iteration.
-	 * 
+	 *
 	 * @param messages the input messages to combine
 	 * @throws Exception
 	 */
@@ -57,7 +57,7 @@ public abstract class MessageCombiner<K, Message> implements Serializable {
 
 	/**
 	 * Sends the combined message to the target vertex.
-	 * 
+	 *
 	 * @param combinedMessage
 	 * @throws Exception
 	 */
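
A matching combiner sketch for the Javadoc above. The sendCombinedMessage name is an assumption inferred from the documented combinedMessage parameter:

import org.apache.flink.graph.pregel.MessageCombiner;
import org.apache.flink.graph.pregel.MessageIterator;

public final class MinCombiner extends MessageCombiner<Long, Long> {

    @Override
    public void combineMessages(MessageIterator<Long> messages) throws Exception {
        // for min-id propagation only the smallest pending message matters,
        // so collapse all messages bound for a vertex into one
        long min = Long.MAX_VALUE;
        for (Long msg : messages) {
            min = Math.min(min, msg);
        }
        sendCombinedMessage(min);
    }
}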

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/MessageIterator.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/MessageIterator.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/MessageIterator.java
index 5a17b90..f8dd926 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/MessageIterator.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/MessageIterator.java
@@ -18,12 +18,12 @@
 
 package org.apache.flink.graph.pregel;
 
-import java.util.Iterator;
-
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.types.Either;
 import org.apache.flink.types.NullValue;
 
+import java.util.Iterator;
+
 /**
  * An iterator that returns messages. The iterator is {@link java.lang.Iterable} at the same time to support
  * the <i>foreach</i> syntax.
@@ -33,17 +33,17 @@ public final class MessageIterator<Message> implements Iterator<Message>, Iterab
 
 	private transient Iterator<Tuple2<?, Either<NullValue, Message>>> source;
 	private Message first = null;
-	
-	final void setSource(Iterator<Tuple2<?, Either<NullValue, Message>>> source) {
+
+	void setSource(Iterator<Tuple2<?, Either<NullValue, Message>>> source) {
 		this.source = source;
 	}
 
-	final void setFirst(Message msg) {
+	void setFirst(Message msg) {
 		this.first = msg;
 	}
-	
+
 	@Override
-	public final boolean hasNext() {
+	public boolean hasNext() {
 		if (first != null) {
 			return true;
 		}
@@ -51,9 +51,9 @@ public final class MessageIterator<Message> implements Iterator<Message>, Iterab
 			return ((this.source != null) && (this.source.hasNext()));
 		}
 	}
-	
+
 	@Override
-	public final Message next() {
+	public Message next() {
 		if (first != null) {
 			Message toReturn = first;
 			first = null;
@@ -63,7 +63,7 @@ public final class MessageIterator<Message> implements Iterator<Message>, Iterab
 	}
 
 	@Override
-	public final void remove() {
+	public void remove() {
 		throw new UnsupportedOperationException();
 	}
 
@@ -71,4 +71,4 @@ public final class MessageIterator<Message> implements Iterator<Message>, Iterab
 	public Iterator<Message> iterator() {
 		return this;
 	}
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/VertexCentricConfiguration.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/VertexCentricConfiguration.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/VertexCentricConfiguration.java
index a0f793a..69fcc52 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/VertexCentricConfiguration.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/pregel/VertexCentricConfiguration.java
@@ -30,13 +30,13 @@ import java.util.List;
  * degree of parallelism, to register aggregators and use broadcast sets in
  * the {@link org.apache.flink.graph.pregel.ComputeFunction}.
  *
- * The VertexCentricConfiguration object is passed as an argument to
+ * <p>The VertexCentricConfiguration object is passed as an argument to
  * {@link org.apache.flink.graph.Graph#runVertexCentricIteration (
  * org.apache.flink.graph.pregel.ComputeFunction, int, VertexCentricConfiguration)}.
  */
 public class VertexCentricConfiguration extends IterationConfiguration {
 
-	/** the broadcast variables for the compute function **/
+	/** The broadcast variables for the compute function. **/
 	private List<Tuple2<String, DataSet<?>>> bcVars = new ArrayList<>();
 
 	public VertexCentricConfiguration() {}
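
Tying the configuration to the iteration, a usage fragment (env and graph are assumed bindings; MinIdCompute and MinCombiner are the sketches shown earlier; the four-argument runVertexCentricIteration overload is the one exercised by the compiler test later in this commit):

VertexCentricConfiguration parameters = new VertexCentricConfiguration();
parameters.setName("min-id propagation");
parameters.addBroadcastSet("startIds", env.fromElements(1L));

// returns the graph with updated vertex values after at most 100 supersteps
Graph<Long, Long, NullValue> components =
    graph.runVertexCentricIteration(new MinIdCompute(), new MinCombiner(), 100, parameters);
components.getVertices().print();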


[04/15] flink git commit: [FLINK-6709] [gelly] Activate strict checkstyle for flink-gelly

Posted by ch...@apache.org.
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/linkanalysis/HITSTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/linkanalysis/HITSTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/linkanalysis/HITSTest.java
new file mode 100644
index 0000000..9f9bc06
--- /dev/null
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/linkanalysis/HITSTest.java
@@ -0,0 +1,148 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.graph.library.linkanalysis;
+
+import org.apache.flink.api.java.DataSet;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.graph.asm.AsmTestBase;
+import org.apache.flink.graph.asm.dataset.Collect;
+import org.apache.flink.graph.library.linkanalysis.HITS.Result;
+import org.apache.flink.types.IntValue;
+import org.apache.flink.types.LongValue;
+import org.apache.flink.types.NullValue;
+
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Tests for {@link HITS}.
+ */
+public class HITSTest
+extends AsmTestBase {
+
+	/*
+	 * This test result can be verified with the following Python script.
+
+	import math
+	import networkx as nx
+
+	graph=nx.read_edgelist('directedSimpleGraph.csv', delimiter=',', create_using=nx.DiGraph())
+	hits=nx.algorithms.link_analysis.hits(graph)
+
+	hubbiness_norm=math.sqrt(sum(v*v for v in hits[0].values()))
+	authority_norm=math.sqrt(sum(v*v for v in hits[1].values()))
+
+	for key in sorted(hits[0]):
+		print('{}: {}, {}'.format(key, hits[0][key]/hubbiness_norm, hits[1][key]/authority_norm))
+	 */
+	@Test
+	public void testWithSimpleGraph()
+			throws Exception {
+		DataSet<Result<IntValue>> hits = new HITS<IntValue, NullValue, NullValue>(20)
+			.run(directedSimpleGraph);
+
+		List<Tuple2<Double, Double>> expectedResults = new ArrayList<>();
+		expectedResults.add(Tuple2.of(0.544643396306, 0.0));
+		expectedResults.add(Tuple2.of(0.0, 0.836329395866));
+		expectedResults.add(Tuple2.of(0.607227031134, 0.268492526138));
+		expectedResults.add(Tuple2.of(0.544643396306, 0.395444899355));
+		expectedResults.add(Tuple2.of(0.0, 0.268492526138));
+		expectedResults.add(Tuple2.of(0.194942233447, 0.0));
+
+		for (Result<IntValue> result : hits.collect()) {
+			int id = result.f0.getValue();
+			assertEquals(expectedResults.get(id).f0, result.getHubScore().getValue(), 0.000001);
+			assertEquals(expectedResults.get(id).f1, result.getAuthorityScore().getValue(), 0.000001);
+		}
+	}
+
+	@Test
+	public void testWithCompleteGraph()
+			throws Exception {
+		double expectedScore = 1.0 / Math.sqrt(completeGraphVertexCount);
+
+		DataSet<Result<LongValue>> hits = new HITS<LongValue, NullValue, NullValue>(0.000001)
+			.run(completeGraph);
+
+		List<Result<LongValue>> results = hits.collect();
+
+		assertEquals(completeGraphVertexCount, results.size());
+
+		for (Result<LongValue> result : results) {
+			assertEquals(expectedScore, result.getHubScore().getValue(), 0.000001);
+			assertEquals(expectedScore, result.getAuthorityScore().getValue(), 0.000001);
+		}
+	}
+
+	/*
+	 * This test result can be verified with the following Python script.
+
+	import math
+	import networkx as nx
+
+	graph=nx.read_edgelist('directedRMatGraph.csv', delimiter=',', create_using=nx.DiGraph())
+	hits=nx.algorithms.link_analysis.hits(graph)
+
+	hubbiness_norm=math.sqrt(sum(v*v for v in hits[0].values()))
+	authority_norm=math.sqrt(sum(v*v for v in hits[1].values()))
+
+	for key in [0, 1, 2, 8, 13, 29, 109, 394, 652, 1020]:
+		print('{}: {}, {}'.format(key, hits[0][str(key)]/hubbiness_norm, hits[1][str(key)]/authority_norm))
+	 */
+	@Test
+	public void testWithRMatGraph()
+			throws Exception {
+		DataSet<Result<LongValue>> hits = directedRMatGraph(10, 16)
+			.run(new HITS<LongValue, NullValue, NullValue>(0.000001));
+
+		Map<Long, Result<LongValue>> results = new HashMap<>();
+		for (Result<LongValue> result :  new Collect<Result<LongValue>>().run(hits).execute()) {
+			results.put(result.f0.getValue(), result);
+		}
+
+		assertEquals(902, results.size());
+
+		Map<Long, Tuple2<Double, Double>> expectedResults = new HashMap<>();
+		// a pseudo-random selection of results, both high and low
+		expectedResults.put(0L, Tuple2.of(0.231077034747, 0.238110214937));
+		expectedResults.put(1L, Tuple2.of(0.162364053933, 0.169679504287));
+		expectedResults.put(2L, Tuple2.of(0.162412612499, 0.161015667261));
+		expectedResults.put(8L, Tuple2.of(0.167064641724, 0.158592966505));
+		expectedResults.put(13L, Tuple2.of(0.041915595624, 0.0407091625629));
+		expectedResults.put(29L, Tuple2.of(0.0102017346511, 0.0146218045999));
+		expectedResults.put(109L, Tuple2.of(0.00190531000389, 0.00481944993023));
+		expectedResults.put(394L, Tuple2.of(0.0122287016161, 0.0147987969538));
+		expectedResults.put(652L, Tuple2.of(0.010966659242, 0.0113713306749));
+		expectedResults.put(1020L, Tuple2.of(0.0, 0.000326973732127));
+
+		for (Map.Entry<Long, Tuple2<Double, Double>> expected : expectedResults.entrySet()) {
+			double hubScore = results.get(expected.getKey()).getHubScore().getValue();
+			double authorityScore = results.get(expected.getKey()).getAuthorityScore().getValue();
+
+			assertEquals(expected.getValue().f0, hubScore, 0.00001);
+			assertEquals(expected.getValue().f1, authorityScore, 0.00001);
+		}
+	}
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/linkanalysis/PageRankTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/linkanalysis/PageRankTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/linkanalysis/PageRankTest.java
new file mode 100644
index 0000000..9c3de71
--- /dev/null
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/linkanalysis/PageRankTest.java
@@ -0,0 +1,139 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.graph.library.linkanalysis;
+
+import org.apache.flink.api.java.DataSet;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.graph.asm.AsmTestBase;
+import org.apache.flink.graph.asm.dataset.Collect;
+import org.apache.flink.graph.library.linkanalysis.PageRank.Result;
+import org.apache.flink.types.DoubleValue;
+import org.apache.flink.types.IntValue;
+import org.apache.flink.types.LongValue;
+import org.apache.flink.types.NullValue;
+
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Tests for {@link PageRank}.
+ */
+public class PageRankTest
+extends AsmTestBase {
+
+	private static final double DAMPING_FACTOR = 0.85;
+
+	/*
+	 * This test result can be verified with the following Python script.
+
+	import networkx as nx
+
+	graph=nx.read_edgelist('directedSimpleGraph.csv', delimiter=',', create_using=nx.DiGraph())
+	pagerank=nx.algorithms.link_analysis.pagerank(graph)
+
+	for key in sorted(pagerank):
+		print('{}: {}'.format(key, pagerank[key]))
+	 */
+	@Test
+	public void testWithSimpleGraph()
+			throws Exception {
+		DataSet<Result<IntValue>> pr = new PageRank<IntValue, NullValue, NullValue>(DAMPING_FACTOR, 10)
+			.run(directedSimpleGraph);
+
+		List<Double> expectedResults = new ArrayList<>();
+		expectedResults.add(0.09091296131286301);
+		expectedResults.add(0.27951855944178117);
+		expectedResults.add(0.12956847924535586);
+		expectedResults.add(0.22329643739217675);
+		expectedResults.add(0.18579060129496028);
+		expectedResults.add(0.09091296131286301);
+
+		for (Tuple2<IntValue, DoubleValue> result : pr.collect()) {
+			int id = result.f0.getValue();
+			assertEquals(expectedResults.get(id), result.f1.getValue(), 0.000001);
+		}
+	}
+
+	@Test
+	public void testWithCompleteGraph()
+			throws Exception {
+		double expectedScore = 1.0 / completeGraphVertexCount;
+
+		DataSet<Result<LongValue>> pr = new PageRank<LongValue, NullValue, NullValue>(DAMPING_FACTOR, 0.000001)
+			.run(completeGraph);
+
+		List<Result<LongValue>> results = pr.collect();
+
+		assertEquals(completeGraphVertexCount, results.size());
+
+		for (Tuple2<LongValue, DoubleValue> result : results) {
+			assertEquals(expectedScore, result.f1.getValue(), 0.000001);
+		}
+	}
+
+	/*
+	 * This test result can be verified with the following Python script.
+
+	import networkx as nx
+
+	graph=nx.read_edgelist('directedRMatGraph.csv', delimiter=',', create_using=nx.DiGraph())
+	pagerank=nx.algorithms.link_analysis.pagerank(graph)
+
+	for key in [0, 1, 2, 8, 13, 29, 109, 394, 652, 1020]:
+		print('{}: {}'.format(key, pagerank[str(key)]))
+	 */
+	@Test
+	public void testWithRMatGraph()
+			throws Exception {
+		DataSet<Result<LongValue>> pr = new PageRank<LongValue, NullValue, NullValue>(DAMPING_FACTOR, 0.000001)
+			.run(directedRMatGraph(10, 16));
+
+		Map<Long, Result<LongValue>> results = new HashMap<>();
+		for (Result<LongValue> result :  new Collect<Result<LongValue>>().run(pr).execute()) {
+			results.put(result.getVertexId0().getValue(), result);
+		}
+
+		assertEquals(902, results.size());
+
+		Map<Long, Double> expectedResults = new HashMap<>();
+		// a pseudo-random selection of results, both high and low
+		expectedResults.put(0L, 0.027111807822);
+		expectedResults.put(1L, 0.0132842310382);
+		expectedResults.put(2L, 0.0121818392504);
+		expectedResults.put(8L, 0.0115916809743);
+		expectedResults.put(13L, 0.00183249490033);
+		expectedResults.put(29L, 0.000848095047082);
+		expectedResults.put(109L, 0.000308507844048);
+		expectedResults.put(394L, 0.000828743280246);
+		expectedResults.put(652L, 0.000684102931253);
+		expectedResults.put(1020L, 0.000250487135148);
+
+		for (Map.Entry<Long, Double> expected : expectedResults.entrySet()) {
+			double value = results.get(expected.getKey()).getPageRankScore().getValue();
+
+			assertEquals(expected.getValue(), value, 0.00001);
+		}
+	}
+}

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/ChecksumHashCodeTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/ChecksumHashCodeTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/ChecksumHashCodeTest.java
index 24f0c2d..9b1d18c 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/ChecksumHashCodeTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/ChecksumHashCodeTest.java
@@ -22,10 +22,14 @@ import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.asm.AsmTestBase;
 import org.apache.flink.graph.asm.dataset.ChecksumHashCode.Checksum;
 import org.apache.flink.graph.test.TestGraphUtils;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link ChecksumHashCode}.
+ */
 public class ChecksumHashCodeTest
 extends AsmTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/directed/EdgeMetricsTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/directed/EdgeMetricsTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/directed/EdgeMetricsTest.java
index 117b3ae..05042c2 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/directed/EdgeMetricsTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/directed/EdgeMetricsTest.java
@@ -18,16 +18,20 @@
 
 package org.apache.flink.graph.library.metric.directed;
 
-import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.apache.flink.graph.asm.AsmTestBase;
 import org.apache.flink.graph.library.metric.directed.EdgeMetrics.Result;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
+import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link EdgeMetrics}.
+ */
 public class EdgeMetricsTest
 extends AsmTestBase {
 
@@ -47,7 +51,7 @@ extends AsmTestBase {
 	public void testWithCompleteGraph()
 			throws Exception {
 		long expectedDegree = completeGraphVertexCount - 1;
-		long expectedMaximumTriplets = CombinatoricsUtils.binomialCoefficient((int)expectedDegree, 2);
+		long expectedMaximumTriplets = CombinatoricsUtils.binomialCoefficient((int) expectedDegree, 2);
 		long expectedTriplets = completeGraphVertexCount * expectedMaximumTriplets;
 
 		Result expectedResult = new Result(expectedTriplets / 3, 2 * expectedTriplets / 3,

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/directed/VertexMetricsTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/directed/VertexMetricsTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/directed/VertexMetricsTest.java
index 54301f5..f72a7bb 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/directed/VertexMetricsTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/directed/VertexMetricsTest.java
@@ -18,16 +18,20 @@
 
 package org.apache.flink.graph.library.metric.directed;
 
-import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.apache.flink.graph.asm.AsmTestBase;
 import org.apache.flink.graph.library.metric.directed.VertexMetrics.Result;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
+import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link VertexMetrics}.
+ */
 public class VertexMetricsTest
 extends AsmTestBase {
 
@@ -48,7 +52,7 @@ extends AsmTestBase {
 			throws Exception {
 		long expectedDegree = completeGraphVertexCount - 1;
 		long expectedBidirectionalEdges = completeGraphVertexCount * expectedDegree / 2;
-		long expectedMaximumTriplets = CombinatoricsUtils.binomialCoefficient((int)expectedDegree, 2);
+		long expectedMaximumTriplets = CombinatoricsUtils.binomialCoefficient((int) expectedDegree, 2);
 		long expectedTriplets = completeGraphVertexCount * expectedMaximumTriplets;
 
 		Result expectedResult = new Result(completeGraphVertexCount, 0, expectedBidirectionalEdges,

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/undirected/EdgeMetricsTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/undirected/EdgeMetricsTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/undirected/EdgeMetricsTest.java
index b4e9f95..3e23906 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/undirected/EdgeMetricsTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/undirected/EdgeMetricsTest.java
@@ -18,16 +18,20 @@
 
 package org.apache.flink.graph.library.metric.undirected;
 
-import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.apache.flink.graph.asm.AsmTestBase;
 import org.apache.flink.graph.library.metric.undirected.EdgeMetrics.Result;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
+import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link EdgeMetrics}.
+ */
 public class EdgeMetricsTest
 extends AsmTestBase {
 
@@ -47,7 +51,7 @@ extends AsmTestBase {
 	public void testWithCompleteGraph()
 			throws Exception {
 		long expectedDegree = completeGraphVertexCount - 1;
-		long expectedMaximumTriplets = CombinatoricsUtils.binomialCoefficient((int)expectedDegree, 2);
+		long expectedMaximumTriplets = CombinatoricsUtils.binomialCoefficient((int) expectedDegree, 2);
 		long expectedTriplets = completeGraphVertexCount * expectedMaximumTriplets;
 
 		Result expectedResult = new Result(expectedTriplets / 3, 2 * expectedTriplets / 3,

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/undirected/VertexMetricsTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/undirected/VertexMetricsTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/undirected/VertexMetricsTest.java
index 848ad79..71e587b 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/undirected/VertexMetricsTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/metric/undirected/VertexMetricsTest.java
@@ -18,16 +18,20 @@
 
 package org.apache.flink.graph.library.metric.undirected;
 
-import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.apache.flink.graph.asm.AsmTestBase;
 import org.apache.flink.graph.library.metric.undirected.VertexMetrics.Result;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
+import org.apache.commons.math3.util.CombinatoricsUtils;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link VertexMetrics}.
+ */
 public class VertexMetricsTest
 extends AsmTestBase {
 
@@ -48,7 +52,7 @@ extends AsmTestBase {
 			throws Exception {
 		long expectedDegree = completeGraphVertexCount - 1;
 		long expectedEdges = completeGraphVertexCount * expectedDegree / 2;
-		long expectedMaximumTriplets = CombinatoricsUtils.binomialCoefficient((int)expectedDegree, 2);
+		long expectedMaximumTriplets = CombinatoricsUtils.binomialCoefficient((int) expectedDegree, 2);
 		long expectedTriplets = completeGraphVertexCount * expectedMaximumTriplets;
 
 		Result expectedResult = new Result(completeGraphVertexCount, expectedEdges, expectedTriplets,

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/similarity/AdamicAdarTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/similarity/AdamicAdarTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/similarity/AdamicAdarTest.java
index 76b28da..aa259a2 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/similarity/AdamicAdarTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/similarity/AdamicAdarTest.java
@@ -25,20 +25,24 @@ import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link AdamicAdar}.
+ */
 public class AdamicAdarTest
 extends AsmTestBase {
 
 	private float[] ilog = {
-		1.0f / (float)Math.log(2),
-		1.0f / (float)Math.log(3),
-		1.0f / (float)Math.log(3),
-		1.0f / (float)Math.log(4),
-		1.0f / (float)Math.log(1),
-		1.0f / (float)Math.log(1)
+		1.0f / (float) Math.log(2),
+		1.0f / (float) Math.log(3),
+		1.0f / (float) Math.log(3),
+		1.0f / (float) Math.log(4),
+		1.0f / (float) Math.log(1),
+		1.0f / (float) Math.log(1)
 	};
 
 	@Test
@@ -98,7 +102,7 @@ extends AsmTestBase {
 	@Test
 	public void testCompleteGraph()
 			throws Exception {
-		float expectedScore = (completeGraphVertexCount - 2) / (float)Math.log(completeGraphVertexCount - 1);
+		float expectedScore = (completeGraphVertexCount - 2) / (float) Math.log(completeGraphVertexCount - 1);
 
 		DataSet<Result<LongValue>> aa = completeGraph
 			.run(new AdamicAdar<LongValue, NullValue, NullValue>());

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/similarity/JaccardIndexTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/similarity/JaccardIndexTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/similarity/JaccardIndexTest.java
index 2443359..d8cd298 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/similarity/JaccardIndexTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/library/similarity/JaccardIndexTest.java
@@ -27,10 +27,14 @@ import org.apache.flink.test.util.TestBaseUtils;
 import org.apache.flink.types.IntValue;
 import org.apache.flink.types.LongValue;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Tests for {@link JaccardIndex}.
+ */
 public class JaccardIndexTest
 extends AsmTestBase {
 

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/pregel/PregelCompilerTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/pregel/PregelCompilerTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/pregel/PregelCompilerTest.java
index fb21c14..71937db 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/pregel/PregelCompilerTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/pregel/PregelCompilerTest.java
@@ -38,12 +38,15 @@ import org.apache.flink.optimizer.plan.WorksetIterationPlanNode;
 import org.apache.flink.optimizer.util.CompilerTestBase;
 import org.apache.flink.runtime.operators.shipping.ShipStrategyType;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
 
+/**
+ * Validate compiled {@link VertexCentricIteration} programs.
+ */
 public class PregelCompilerTest extends CompilerTestBase {
 
 	private static final long serialVersionUID = 1L;
@@ -51,223 +54,198 @@ public class PregelCompilerTest extends CompilerTestBase {
 	@SuppressWarnings("serial")
 	@Test
 	public void testPregelCompiler() {
-		try {
-			ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
-			env.setParallelism(DEFAULT_PARALLELISM);
-			// compose test program
-			{
-
-				DataSet<Vertex<Long, Long>> initialVertices = env.fromElements(
-						new Tuple2<>(1L, 1L), new Tuple2<>(2L, 2L))
-						.map(new Tuple2ToVertexMap<Long, Long>());
-
-				DataSet<Edge<Long, NullValue>> edges = env.fromElements(new Tuple2<>(1L, 2L))
-					.map(new MapFunction<Tuple2<Long, Long>, Edge<Long, NullValue>>() {
-
-						public Edge<Long, NullValue> map(Tuple2<Long, Long> edge) {
-							return new Edge<>(edge.f0, edge.f1, NullValue.getInstance());
-						}
+		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
+		env.setParallelism(DEFAULT_PARALLELISM);
+		// compose test program
+		{
+
+			DataSet<Vertex<Long, Long>> initialVertices = env.fromElements(
+				new Tuple2<>(1L, 1L), new Tuple2<>(2L, 2L))
+				.map(new Tuple2ToVertexMap<Long, Long>());
+
+			DataSet<Edge<Long, NullValue>> edges = env.fromElements(new Tuple2<>(1L, 2L))
+				.map(new MapFunction<Tuple2<Long, Long>, Edge<Long, NullValue>>() {
+
+					public Edge<Long, NullValue> map(Tuple2<Long, Long> edge) {
+						return new Edge<>(edge.f0, edge.f1, NullValue.getInstance());
+					}
 				});
 
-				Graph<Long, Long, NullValue> graph = Graph.fromDataSet(initialVertices, edges, env);
-				
-				DataSet<Vertex<Long, Long>> result = graph.runVertexCentricIteration(
-						new CCCompute(), null, 100).getVertices();
-				
-				result.output(new DiscardingOutputFormat<Vertex<Long, Long>>());
-			}
-			
-			Plan p = env.createProgramPlan("Pregel Connected Components");
-			OptimizedPlan op = compileNoStats(p);
-			
-			// check the sink
-			SinkPlanNode sink = op.getDataSinks().iterator().next();
-			assertEquals(ShipStrategyType.FORWARD, sink.getInput().getShipStrategy());
-			assertEquals(DEFAULT_PARALLELISM, sink.getParallelism());
-			
-			// check the iteration
-			WorksetIterationPlanNode iteration = (WorksetIterationPlanNode) sink.getInput().getSource();
-			assertEquals(DEFAULT_PARALLELISM, iteration.getParallelism());
-			
-			// check the solution set delta
-			PlanNode ssDelta = iteration.getSolutionSetDeltaPlanNode();
-			assertTrue(ssDelta instanceof SingleInputPlanNode);
-			
-			SingleInputPlanNode ssFlatMap = (SingleInputPlanNode) ((SingleInputPlanNode) (ssDelta)).getInput().getSource();
-			assertEquals(DEFAULT_PARALLELISM, ssFlatMap.getParallelism());
-			assertEquals(ShipStrategyType.FORWARD, ssFlatMap.getInput().getShipStrategy());
-			
-			// check the computation coGroup
-			DualInputPlanNode computationCoGroup = (DualInputPlanNode) (ssFlatMap.getInput().getSource());
-			assertEquals(DEFAULT_PARALLELISM, computationCoGroup.getParallelism());
-			assertEquals(ShipStrategyType.FORWARD, computationCoGroup.getInput1().getShipStrategy());
-			assertEquals(ShipStrategyType.PARTITION_HASH, computationCoGroup.getInput2().getShipStrategy());
-			assertTrue(computationCoGroup.getInput2().getTempMode().isCached());
-			
-			assertEquals(new FieldList(0), computationCoGroup.getInput2().getShipStrategyKeys());
-			
-			// check that the initial partitioning is pushed out of the loop
-			assertEquals(ShipStrategyType.PARTITION_HASH, iteration.getInput1().getShipStrategy());
-			assertEquals(new FieldList(0), iteration.getInput1().getShipStrategyKeys());
+			Graph<Long, Long, NullValue> graph = Graph.fromDataSet(initialVertices, edges, env);
 
+			DataSet<Vertex<Long, Long>> result = graph.runVertexCentricIteration(
+				new CCCompute(), null, 100).getVertices();
+
+			result.output(new DiscardingOutputFormat<Vertex<Long, Long>>());
 		}
-		catch (Exception e) {
-			System.err.println(e.getMessage());
-			e.printStackTrace();
-			fail(e.getMessage());
-		}
+
+		Plan p = env.createProgramPlan("Pregel Connected Components");
+		OptimizedPlan op = compileNoStats(p);
+
+		// check the sink
+		SinkPlanNode sink = op.getDataSinks().iterator().next();
+		assertEquals(ShipStrategyType.FORWARD, sink.getInput().getShipStrategy());
+		assertEquals(DEFAULT_PARALLELISM, sink.getParallelism());
+
+		// check the iteration
+		WorksetIterationPlanNode iteration = (WorksetIterationPlanNode) sink.getInput().getSource();
+		assertEquals(DEFAULT_PARALLELISM, iteration.getParallelism());
+
+		// check the solution set delta
+		PlanNode ssDelta = iteration.getSolutionSetDeltaPlanNode();
+		assertTrue(ssDelta instanceof SingleInputPlanNode);
+
+		SingleInputPlanNode ssFlatMap = (SingleInputPlanNode) ((SingleInputPlanNode) (ssDelta)).getInput().getSource();
+		assertEquals(DEFAULT_PARALLELISM, ssFlatMap.getParallelism());
+		assertEquals(ShipStrategyType.FORWARD, ssFlatMap.getInput().getShipStrategy());
+
+		// check the computation coGroup
+		DualInputPlanNode computationCoGroup = (DualInputPlanNode) (ssFlatMap.getInput().getSource());
+		assertEquals(DEFAULT_PARALLELISM, computationCoGroup.getParallelism());
+		assertEquals(ShipStrategyType.FORWARD, computationCoGroup.getInput1().getShipStrategy());
+		assertEquals(ShipStrategyType.PARTITION_HASH, computationCoGroup.getInput2().getShipStrategy());
+		assertTrue(computationCoGroup.getInput2().getTempMode().isCached());
+
+		assertEquals(new FieldList(0), computationCoGroup.getInput2().getShipStrategyKeys());
+
+		// check that the initial partitioning is pushed out of the loop
+		assertEquals(ShipStrategyType.PARTITION_HASH, iteration.getInput1().getShipStrategy());
+		assertEquals(new FieldList(0), iteration.getInput1().getShipStrategyKeys());
 	}
-	
+
 	@SuppressWarnings("serial")
 	@Test
 	public void testPregelCompilerWithBroadcastVariable() {
-		try {
-			final String BC_VAR_NAME = "borat variable";
-			
-			
-			ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
-			env.setParallelism(DEFAULT_PARALLELISM);
-			// compose test program
-			{
-				DataSet<Long> bcVar = env.fromElements(1L);
-
-				DataSet<Vertex<Long, Long>> initialVertices = env.fromElements(
-						new Tuple2<>(1L, 1L), new Tuple2<>(2L, 2L))
-						.map(new Tuple2ToVertexMap<Long, Long>());
-
-				DataSet<Edge<Long, NullValue>> edges = env.fromElements(new Tuple2<>(1L, 2L))
-						.map(new MapFunction<Tuple2<Long, Long>, Edge<Long, NullValue>>() {
-
-							public Edge<Long, NullValue> map(Tuple2<Long, Long> edge) {
-								return new Edge<>(edge.f0, edge.f1, NullValue.getInstance());
-							}
-					});
-
-				Graph<Long, Long, NullValue> graph = Graph.fromDataSet(initialVertices, edges, env);
-
-				VertexCentricConfiguration parameters = new VertexCentricConfiguration();
-				parameters.addBroadcastSet(BC_VAR_NAME, bcVar);
-
-				DataSet<Vertex<Long, Long>> result = graph.runVertexCentricIteration(
-						new CCCompute(), null, 100, parameters)
-						.getVertices();
-					
-				result.output(new DiscardingOutputFormat<Vertex<Long, Long>>());
+		final String broadcastSetName = "broadcast";
 
-			}
-			
-			Plan p = env.createProgramPlan("Pregel Connected Components");
-			OptimizedPlan op = compileNoStats(p);
-			
-			// check the sink
-			SinkPlanNode sink = op.getDataSinks().iterator().next();
-			assertEquals(ShipStrategyType.FORWARD, sink.getInput().getShipStrategy());
-			assertEquals(DEFAULT_PARALLELISM, sink.getParallelism());
-			
-			// check the iteration
-			WorksetIterationPlanNode iteration = (WorksetIterationPlanNode) sink.getInput().getSource();
-			assertEquals(DEFAULT_PARALLELISM, iteration.getParallelism());
-			
-			// check the solution set delta
-			PlanNode ssDelta = iteration.getSolutionSetDeltaPlanNode();
-			assertTrue(ssDelta instanceof SingleInputPlanNode);
-			
-			SingleInputPlanNode ssFlatMap = (SingleInputPlanNode) ((SingleInputPlanNode) (ssDelta)).getInput().getSource();
-			assertEquals(DEFAULT_PARALLELISM, ssFlatMap.getParallelism());
-			assertEquals(ShipStrategyType.FORWARD, ssFlatMap.getInput().getShipStrategy());
-			
-			// check the computation coGroup
-			DualInputPlanNode computationCoGroup = (DualInputPlanNode) (ssFlatMap.getInput().getSource());
-			assertEquals(DEFAULT_PARALLELISM, computationCoGroup.getParallelism());
-			assertEquals(ShipStrategyType.FORWARD, computationCoGroup.getInput1().getShipStrategy());
-			assertEquals(ShipStrategyType.PARTITION_HASH, computationCoGroup.getInput2().getShipStrategy());
-			assertTrue(computationCoGroup.getInput2().getTempMode().isCached());
-			
-			assertEquals(new FieldList(0), computationCoGroup.getInput2().getShipStrategyKeys());
-			
-			// check that the initial partitioning is pushed out of the loop
-			assertEquals(ShipStrategyType.PARTITION_HASH, iteration.getInput1().getShipStrategy());
-			assertEquals(new FieldList(0), iteration.getInput1().getShipStrategyKeys());
-		}
-		catch (Exception e) {
-			System.err.println(e.getMessage());
-			e.printStackTrace();
-			fail(e.getMessage());
+		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
+		env.setParallelism(DEFAULT_PARALLELISM);
+		// compose test program
+		{
+			DataSet<Long> bcVar = env.fromElements(1L);
+
+			DataSet<Vertex<Long, Long>> initialVertices = env.fromElements(
+				new Tuple2<>(1L, 1L), new Tuple2<>(2L, 2L))
+				.map(new Tuple2ToVertexMap<Long, Long>());
+
+			DataSet<Edge<Long, NullValue>> edges = env.fromElements(new Tuple2<>(1L, 2L))
+				.map(new MapFunction<Tuple2<Long, Long>, Edge<Long, NullValue>>() {
+
+					public Edge<Long, NullValue> map(Tuple2<Long, Long> edge) {
+						return new Edge<>(edge.f0, edge.f1, NullValue.getInstance());
+					}
+				});
+
+			Graph<Long, Long, NullValue> graph = Graph.fromDataSet(initialVertices, edges, env);
+
+			VertexCentricConfiguration parameters = new VertexCentricConfiguration();
+			parameters.addBroadcastSet(broadcastSetName, bcVar);
+
+			DataSet<Vertex<Long, Long>> result = graph.runVertexCentricIteration(
+				new CCCompute(), null, 100, parameters)
+				.getVertices();
+
+			result.output(new DiscardingOutputFormat<Vertex<Long, Long>>());
 		}
+
+		Plan p = env.createProgramPlan("Pregel Connected Components");
+		OptimizedPlan op = compileNoStats(p);
+
+		// check the sink
+		SinkPlanNode sink = op.getDataSinks().iterator().next();
+		assertEquals(ShipStrategyType.FORWARD, sink.getInput().getShipStrategy());
+		assertEquals(DEFAULT_PARALLELISM, sink.getParallelism());
+
+		// check the iteration
+		WorksetIterationPlanNode iteration = (WorksetIterationPlanNode) sink.getInput().getSource();
+		assertEquals(DEFAULT_PARALLELISM, iteration.getParallelism());
+
+		// check the solution set delta
+		PlanNode ssDelta = iteration.getSolutionSetDeltaPlanNode();
+		assertTrue(ssDelta instanceof SingleInputPlanNode);
+
+		SingleInputPlanNode ssFlatMap = (SingleInputPlanNode) ((SingleInputPlanNode) (ssDelta)).getInput().getSource();
+		assertEquals(DEFAULT_PARALLELISM, ssFlatMap.getParallelism());
+		assertEquals(ShipStrategyType.FORWARD, ssFlatMap.getInput().getShipStrategy());
+
+		// check the computation coGroup
+		DualInputPlanNode computationCoGroup = (DualInputPlanNode) (ssFlatMap.getInput().getSource());
+		assertEquals(DEFAULT_PARALLELISM, computationCoGroup.getParallelism());
+		assertEquals(ShipStrategyType.FORWARD, computationCoGroup.getInput1().getShipStrategy());
+		assertEquals(ShipStrategyType.PARTITION_HASH, computationCoGroup.getInput2().getShipStrategy());
+		assertTrue(computationCoGroup.getInput2().getTempMode().isCached());
+
+		assertEquals(new FieldList(0), computationCoGroup.getInput2().getShipStrategyKeys());
+
+		// check that the initial partitioning is pushed out of the loop
+		assertEquals(ShipStrategyType.PARTITION_HASH, iteration.getInput1().getShipStrategy());
+		assertEquals(new FieldList(0), iteration.getInput1().getShipStrategyKeys());
 	}
 
 	@SuppressWarnings("serial")
 	@Test
 	public void testPregelWithCombiner() {
-		try {
-			ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
-			env.setParallelism(DEFAULT_PARALLELISM);
-			// compose test program
-			{
-
-				DataSet<Vertex<Long, Long>> initialVertices = env.fromElements(
-						new Tuple2<>(1L, 1L), new Tuple2<>(2L, 2L))
-						.map(new Tuple2ToVertexMap<Long, Long>());
-
-				DataSet<Edge<Long, NullValue>> edges = env.fromElements(new Tuple2<>(1L, 2L))
-					.map(new MapFunction<Tuple2<Long, Long>, Edge<Long, NullValue>>() {
-
-						public Edge<Long, NullValue> map(Tuple2<Long, Long> edge) {
-							return new Edge<>(edge.f0, edge.f1, NullValue.getInstance());
-						}
+		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
+		env.setParallelism(DEFAULT_PARALLELISM);
+		// compose test program
+		{
+
+			DataSet<Vertex<Long, Long>> initialVertices = env.fromElements(
+				new Tuple2<>(1L, 1L), new Tuple2<>(2L, 2L))
+				.map(new Tuple2ToVertexMap<Long, Long>());
+
+			DataSet<Edge<Long, NullValue>> edges = env.fromElements(new Tuple2<>(1L, 2L))
+				.map(new MapFunction<Tuple2<Long, Long>, Edge<Long, NullValue>>() {
+
+					public Edge<Long, NullValue> map(Tuple2<Long, Long> edge) {
+						return new Edge<>(edge.f0, edge.f1, NullValue.getInstance());
+					}
 				});
 
-				Graph<Long, Long, NullValue> graph = Graph.fromDataSet(initialVertices, edges, env);
-				
-				DataSet<Vertex<Long, Long>> result = graph.runVertexCentricIteration(
-						new CCCompute(), new CCCombiner(), 100).getVertices();
-				
-				result.output(new DiscardingOutputFormat<Vertex<Long, Long>>());
-			}
-			
-			Plan p = env.createProgramPlan("Pregel Connected Components");
-			OptimizedPlan op = compileNoStats(p);
-			
-			// check the sink
-			SinkPlanNode sink = op.getDataSinks().iterator().next();
-			assertEquals(ShipStrategyType.FORWARD, sink.getInput().getShipStrategy());
-			assertEquals(DEFAULT_PARALLELISM, sink.getParallelism());
-			
-			// check the iteration
-			WorksetIterationPlanNode iteration = (WorksetIterationPlanNode) sink.getInput().getSource();
-			assertEquals(DEFAULT_PARALLELISM, iteration.getParallelism());
-
-			// check the combiner
-			SingleInputPlanNode combiner = (SingleInputPlanNode) iteration.getInput2().getSource();
-			assertEquals(ShipStrategyType.FORWARD, combiner.getInput().getShipStrategy());
-			
-			// check the solution set delta
-			PlanNode ssDelta = iteration.getSolutionSetDeltaPlanNode();
-			assertTrue(ssDelta instanceof SingleInputPlanNode);
-			
-			SingleInputPlanNode ssFlatMap = (SingleInputPlanNode) ((SingleInputPlanNode) (ssDelta)).getInput().getSource();
-			assertEquals(DEFAULT_PARALLELISM, ssFlatMap.getParallelism());
-			assertEquals(ShipStrategyType.FORWARD, ssFlatMap.getInput().getShipStrategy());
-			
-			// check the computation coGroup
-			DualInputPlanNode computationCoGroup = (DualInputPlanNode) (ssFlatMap.getInput().getSource());
-			assertEquals(DEFAULT_PARALLELISM, computationCoGroup.getParallelism());
-			assertEquals(ShipStrategyType.FORWARD, computationCoGroup.getInput1().getShipStrategy());
-			assertEquals(ShipStrategyType.PARTITION_HASH, computationCoGroup.getInput2().getShipStrategy());
-			assertTrue(computationCoGroup.getInput2().getTempMode().isCached());
-			
-			assertEquals(new FieldList(0), computationCoGroup.getInput2().getShipStrategyKeys());
-			
-			// check that the initial partitioning is pushed out of the loop
-			assertEquals(ShipStrategyType.PARTITION_HASH, iteration.getInput1().getShipStrategy());
-			assertEquals(new FieldList(0), iteration.getInput1().getShipStrategyKeys());
+			Graph<Long, Long, NullValue> graph = Graph.fromDataSet(initialVertices, edges, env);
 
+			DataSet<Vertex<Long, Long>> result = graph.runVertexCentricIteration(
+				new CCCompute(), new CCCombiner(), 100).getVertices();
+
+			result.output(new DiscardingOutputFormat<Vertex<Long, Long>>());
 		}
-		catch (Exception e) {
-			System.err.println(e.getMessage());
-			e.printStackTrace();
-			fail(e.getMessage());
-		}
+
+		Plan p = env.createProgramPlan("Pregel Connected Components");
+		OptimizedPlan op = compileNoStats(p);
+
+		// check the sink
+		SinkPlanNode sink = op.getDataSinks().iterator().next();
+		assertEquals(ShipStrategyType.FORWARD, sink.getInput().getShipStrategy());
+		assertEquals(DEFAULT_PARALLELISM, sink.getParallelism());
+
+		// check the iteration
+		WorksetIterationPlanNode iteration = (WorksetIterationPlanNode) sink.getInput().getSource();
+		assertEquals(DEFAULT_PARALLELISM, iteration.getParallelism());
+
+		// check the combiner
+		SingleInputPlanNode combiner = (SingleInputPlanNode) iteration.getInput2().getSource();
+		assertEquals(ShipStrategyType.FORWARD, combiner.getInput().getShipStrategy());
+
+		// check the solution set delta
+		PlanNode ssDelta = iteration.getSolutionSetDeltaPlanNode();
+		assertTrue(ssDelta instanceof SingleInputPlanNode);
+
+		SingleInputPlanNode ssFlatMap = (SingleInputPlanNode) ((SingleInputPlanNode) (ssDelta)).getInput().getSource();
+		assertEquals(DEFAULT_PARALLELISM, ssFlatMap.getParallelism());
+		assertEquals(ShipStrategyType.FORWARD, ssFlatMap.getInput().getShipStrategy());
+
+		// check the computation coGroup
+		DualInputPlanNode computationCoGroup = (DualInputPlanNode) (ssFlatMap.getInput().getSource());
+		assertEquals(DEFAULT_PARALLELISM, computationCoGroup.getParallelism());
+		assertEquals(ShipStrategyType.FORWARD, computationCoGroup.getInput1().getShipStrategy());
+		assertEquals(ShipStrategyType.PARTITION_HASH, computationCoGroup.getInput2().getShipStrategy());
+		assertTrue(computationCoGroup.getInput2().getTempMode().isCached());
+
+		assertEquals(new FieldList(0), computationCoGroup.getInput2().getShipStrategyKeys());
+
+		// check that the initial partitioning is pushed out of the loop
+		assertEquals(ShipStrategyType.PARTITION_HASH, iteration.getInput1().getShipStrategy());
+		assertEquals(new FieldList(0), iteration.getInput1().getShipStrategyKeys());
 	}
 
 	@SuppressWarnings("serial")
@@ -283,7 +261,7 @@ public class PregelCompilerTest extends CompilerTestBase {
 
 			if ((getSuperstepNumber() == 1) || (currentComponent < vertex.getValue())) {
 				setNewVertexValue(currentComponent);
-				for (Edge<Long, NullValue> edge: getEdges()) {
+				for (Edge<Long, NullValue> edge : getEdges()) {
 					sendMessageTo(edge.getTarget(), currentComponent);
 				}
 			}
@@ -291,16 +269,15 @@ public class PregelCompilerTest extends CompilerTestBase {
 	}
 
 	@SuppressWarnings("serial")
-	public static final class CCCombiner extends MessageCombiner<Long, Long> {
+	private static final class CCCombiner extends MessageCombiner<Long, Long> {
 
 		public void combineMessages(MessageIterator<Long> messages) {
 
 			long minMessage = Long.MAX_VALUE;
-			for (Long msg: messages) {
+			for (Long msg : messages) {
 				minMessage = Math.min(minMessage, msg);
 			}
 			sendCombinedMessage(minMessage);
 		}
 	}
-
 }
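
For reference, the vertex-centric compute function that these compiler tests execute has roughly the shape sketched below. This is a minimal connected-components variant modeled on the CCCompute hunk above; the class name MinValueCompute is illustrative, and the helpers (getSuperstepNumber, setNewVertexValue, getEdges, sendMessageTo) are the same ones CCCompute uses:

	@SuppressWarnings("serial")
	private static final class MinValueCompute extends ComputeFunction<Long, Long, NullValue, Long> {

		@Override
		public void compute(Vertex<Long, Long> vertex, MessageIterator<Long> messages) {
			// fold all incoming messages into the current minimum
			long currentComponent = vertex.getValue();
			for (Long msg : messages) {
				currentComponent = Math.min(currentComponent, msg);
			}

			// propagate only on improvement (or in the first superstep),
			// so the iteration converges once no vertex changes
			if ((getSuperstepNumber() == 1) || (currentComponent < vertex.getValue())) {
				setNewVertexValue(currentComponent);
				for (Edge<Long, NullValue> edge : getEdges()) {
					sendMessageTo(edge.getTarget(), currentComponent);
				}
			}
		}
	}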

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/pregel/PregelTranslationTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/pregel/PregelTranslationTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/pregel/PregelTranslationTest.java
index 3bf2e32..8084e71 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/pregel/PregelTranslationTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/pregel/PregelTranslationTest.java
@@ -16,7 +16,6 @@
  * limitations under the License.
  */
 
-
 package org.apache.flink.graph.pregel;
 
 import org.apache.flink.api.common.aggregators.LongSumAggregator;
@@ -33,100 +32,92 @@ import org.apache.flink.api.java.tuple.Tuple3;
 import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
 
+/**
+ * Test the creation of a {@link VertexCentricIteration} program.
+ */
 @SuppressWarnings("serial")
 public class PregelTranslationTest {
 
+	private static final String ITERATION_NAME = "Test Name";
+
+	private static final String AGGREGATOR_NAME = "AggregatorName";
+
+	private static final String BC_SET_NAME = "broadcast messages";
+
+	private static final int NUM_ITERATIONS = 13;
+
+	private static final int ITERATION_PARALLELISM = 77;
+
 	@Test
 	public void testTranslationPlainEdges() {
-		try {
-			final String ITERATION_NAME = "Test Name";
-			
-			final String AGGREGATOR_NAME = "AggregatorName";
-			
-			final String BC_SET_NAME = "borat messages";
-
-			final int NUM_ITERATIONS = 13;
-			
-			final int ITERATION_parallelism = 77;
-			
-			
-			ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
-			
-			DataSet<Long> bcVar = env.fromElements(1L);
-			
-			DataSet<Vertex<String, Double>> result;
-			
-			// ------------ construct the test program ------------------
-			{
-				
-				DataSet<Tuple2<String, Double>> initialVertices = env.fromElements(new Tuple2<>("abc", 3.44));
-
-				DataSet<Tuple2<String, String>> edges = env.fromElements(new Tuple2<>("a", "c"));
-
-				Graph<String, Double, NullValue> graph = Graph.fromTupleDataSet(initialVertices,
-						edges.map(new MapFunction<Tuple2<String, String>, Tuple3<String, String, NullValue>>() {
-
-							public Tuple3<String, String, NullValue> map(
-									Tuple2<String, String> edge) {
-								return new Tuple3<>(edge.f0, edge.f1, NullValue.getInstance());
-							}
-						}), env);
-
-				VertexCentricConfiguration parameters = new VertexCentricConfiguration();
-
-				parameters.addBroadcastSet(BC_SET_NAME, bcVar);
-				parameters.setName(ITERATION_NAME);
-				parameters.setParallelism(ITERATION_parallelism);
-				parameters.registerAggregator(AGGREGATOR_NAME, new LongSumAggregator());
-
-				result = graph.runVertexCentricIteration(new MyCompute(), null,
-						NUM_ITERATIONS, parameters).getVertices();
-
-				result.output(new DiscardingOutputFormat<Vertex<String, Double>>());
-			}
-			
-			
-			// ------------- validate the java program ----------------
-			
-			assertTrue(result instanceof DeltaIterationResultSet);
-			
-			DeltaIterationResultSet<?, ?> resultSet = (DeltaIterationResultSet<?, ?>) result;
-			DeltaIteration<?, ?> iteration = resultSet.getIterationHead();
-			
-			// check the basic iteration properties
-			assertEquals(NUM_ITERATIONS, resultSet.getMaxIterations());
-			assertArrayEquals(new int[] {0}, resultSet.getKeyPositions());
-			assertEquals(ITERATION_parallelism, iteration.getParallelism());
-			assertEquals(ITERATION_NAME, iteration.getName());
-			
-			assertEquals(AGGREGATOR_NAME, iteration.getAggregators().getAllRegisteredAggregators().iterator().next().getName());
-			
-			TwoInputUdfOperator<?, ?, ?, ?> computationCoGroup =
-					(TwoInputUdfOperator<?, ?, ?, ?>) ((SingleInputUdfOperator<?, ?, ?>) resultSet.getNextWorkset()).getInput();
-			
-			// validate that the broadcast sets are forwarded
-			assertEquals(bcVar, computationCoGroup.getBroadcastSets().get(BC_SET_NAME));
-		}
-		catch (Exception e) {
-			System.err.println(e.getMessage());
-			e.printStackTrace();
-			fail(e.getMessage());
-		}
+		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
+
+		DataSet<Long> bcVar = env.fromElements(1L);
+
+		DataSet<Vertex<String, Double>> result;
+
+		// ------------ construct the test program ------------------
+
+		DataSet<Tuple2<String, Double>> initialVertices = env.fromElements(new Tuple2<>("abc", 3.44));
+
+		DataSet<Tuple2<String, String>> edges = env.fromElements(new Tuple2<>("a", "c"));
+
+		Graph<String, Double, NullValue> graph = Graph.fromTupleDataSet(initialVertices,
+			edges.map(new MapFunction<Tuple2<String, String>, Tuple3<String, String, NullValue>>() {
+
+				public Tuple3<String, String, NullValue> map(
+					Tuple2<String, String> edge) {
+					return new Tuple3<>(edge.f0, edge.f1, NullValue.getInstance());
+				}
+			}), env);
+
+		VertexCentricConfiguration parameters = new VertexCentricConfiguration();
+
+		parameters.addBroadcastSet(BC_SET_NAME, bcVar);
+		parameters.setName(ITERATION_NAME);
+		parameters.setParallelism(ITERATION_PARALLELISM);
+		parameters.registerAggregator(AGGREGATOR_NAME, new LongSumAggregator());
+
+		result = graph.runVertexCentricIteration(new MyCompute(), null,
+			NUM_ITERATIONS, parameters).getVertices();
+
+		result.output(new DiscardingOutputFormat<Vertex<String, Double>>());
+
+		// ------------- validate the java program ----------------
+
+		assertTrue(result instanceof DeltaIterationResultSet);
+
+		DeltaIterationResultSet<?, ?> resultSet = (DeltaIterationResultSet<?, ?>) result;
+		DeltaIteration<?, ?> iteration = resultSet.getIterationHead();
+
+		// check the basic iteration properties
+		assertEquals(NUM_ITERATIONS, resultSet.getMaxIterations());
+		assertArrayEquals(new int[]{0}, resultSet.getKeyPositions());
+		assertEquals(ITERATION_PARALLELISM, iteration.getParallelism());
+		assertEquals(ITERATION_NAME, iteration.getName());
+
+		assertEquals(AGGREGATOR_NAME, iteration.getAggregators().getAllRegisteredAggregators().iterator().next().getName());
+
+		TwoInputUdfOperator<?, ?, ?, ?> computationCoGroup =
+			(TwoInputUdfOperator<?, ?, ?, ?>) ((SingleInputUdfOperator<?, ?, ?>) resultSet.getNextWorkset()).getInput();
+
+		// validate that the broadcast sets are forwarded
+		assertEquals(bcVar, computationCoGroup.getBroadcastSets().get(BC_SET_NAME));
 	}
-	
+
 	// --------------------------------------------------------------------------------------------
 
 	private static final class MyCompute extends ComputeFunction<String, Double, NullValue, Double> {
 
 		@Override
-		public void compute(Vertex<String, Double> vertex,
-				MessageIterator<Double> messages) throws Exception {}
+		public void compute(Vertex<String, Double> vertex, MessageIterator<Double> messages) throws Exception {
+		}
 	}
 }
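
Taken together, the assertions above pin down how a configured vertex-centric iteration is translated into a delta iteration. In application code the same wiring looks roughly like the following sketch, reusing the graph, bcVar and MyCompute from this test (the literal values are illustrative; the null argument is the optional message combiner, exercised separately in the compiler test above):

	VertexCentricConfiguration parameters = new VertexCentricConfiguration();
	parameters.setName("pregel iteration");                  // surfaced via iteration.getName()
	parameters.setParallelism(77);                           // surfaced via iteration.getParallelism()
	parameters.addBroadcastSet("broadcast messages", bcVar); // forwarded to the computation coGroup
	parameters.registerAggregator("agg", new LongSumAggregator());

	DataSet<Vertex<String, Double>> vertices = graph
		.runVertexCentricIteration(new MyCompute(), null, 13, parameters)
		.getVertices();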

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/spargel/SpargelCompilerTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/spargel/SpargelCompilerTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/spargel/SpargelCompilerTest.java
index 676e0cd..1c6d08e 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/spargel/SpargelCompilerTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/spargel/SpargelCompilerTest.java
@@ -40,13 +40,15 @@ import org.apache.flink.optimizer.util.CompilerTestBase;
 import org.apache.flink.runtime.operators.shipping.ShipStrategyType;
 import org.apache.flink.runtime.operators.util.LocalStrategy;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
 
+/**
+ * Validate compiled {@link ScatterGatherIteration} programs.
+ */
 public class SpargelCompilerTest extends CompilerTestBase {
 
 	private static final long serialVersionUID = 1L;
@@ -54,161 +56,143 @@ public class SpargelCompilerTest extends CompilerTestBase {
 	@SuppressWarnings("serial")
 	@Test
 	public void testSpargelCompiler() {
-		try {
-			ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
-			env.setParallelism(DEFAULT_PARALLELISM);
-			// compose test program
-			{
-
-				DataSet<Vertex<Long, Long>> initialVertices = env.fromElements(
-						new Tuple2<>(1L, 1L), new Tuple2<>(2L, 2L))
-						.map(new Tuple2ToVertexMap<Long, Long>());
-
-				DataSet<Edge<Long, NullValue>> edges = env.fromElements(new Tuple2<>(1L, 2L))
-					.map(new MapFunction<Tuple2<Long, Long>, Edge<Long, NullValue>>() {
-
-						public Edge<Long, NullValue> map(Tuple2<Long, Long> edge) {
-							return new Edge<>(edge.f0, edge.f1, NullValue.getInstance());
-						}
-				});
-
-				Graph<Long, Long, NullValue> graph = Graph.fromDataSet(initialVertices, edges, env);
-
-				DataSet<Vertex<Long, Long>> result = graph.runScatterGatherIteration(
-						new ConnectedComponents.CCMessenger<Long, Long>(BasicTypeInfo.LONG_TYPE_INFO),
-						new ConnectedComponents.CCUpdater<Long, Long>(), 100)
-						.getVertices();
-				
-				result.output(new DiscardingOutputFormat<Vertex<Long, Long>>());
-			}
-			
-			Plan p = env.createProgramPlan("Spargel Connected Components");
-			OptimizedPlan op = compileNoStats(p);
-			
-			// check the sink
-			SinkPlanNode sink = op.getDataSinks().iterator().next();
-			assertEquals(ShipStrategyType.FORWARD, sink.getInput().getShipStrategy());
-			assertEquals(DEFAULT_PARALLELISM, sink.getParallelism());
-			
-			// check the iteration
-			WorksetIterationPlanNode iteration = (WorksetIterationPlanNode) sink.getInput().getSource();
-			assertEquals(DEFAULT_PARALLELISM, iteration.getParallelism());
-			
-			// check the solution set join and the delta
-			PlanNode ssDelta = iteration.getSolutionSetDeltaPlanNode();
-			assertTrue(ssDelta instanceof DualInputPlanNode); // this is only true if the update functions preserves the partitioning
-			
-			DualInputPlanNode ssJoin = (DualInputPlanNode) ssDelta;
-			assertEquals(DEFAULT_PARALLELISM, ssJoin.getParallelism());
-			assertEquals(ShipStrategyType.PARTITION_HASH, ssJoin.getInput1().getShipStrategy());
-			assertEquals(new FieldList(0), ssJoin.getInput1().getShipStrategyKeys());
-			
-			// check the workset set join
-			DualInputPlanNode edgeJoin = (DualInputPlanNode) ssJoin.getInput1().getSource();
-			assertEquals(DEFAULT_PARALLELISM, edgeJoin.getParallelism());
-			assertEquals(ShipStrategyType.PARTITION_HASH, edgeJoin.getInput1().getShipStrategy());
-			assertEquals(ShipStrategyType.FORWARD, edgeJoin.getInput2().getShipStrategy());
-			assertTrue(edgeJoin.getInput1().getTempMode().isCached());
-			
-			assertEquals(new FieldList(0), edgeJoin.getInput1().getShipStrategyKeys());
-			
-			// check that the initial partitioning is pushed out of the loop
-			assertEquals(ShipStrategyType.PARTITION_HASH, iteration.getInput1().getShipStrategy());
-			assertEquals(ShipStrategyType.PARTITION_HASH, iteration.getInput2().getShipStrategy());
-			assertEquals(new FieldList(0), iteration.getInput1().getShipStrategyKeys());
-			assertEquals(new FieldList(0), iteration.getInput2().getShipStrategyKeys());
-			
-			// check that the initial workset sort is outside the loop
-			assertEquals(LocalStrategy.SORT, iteration.getInput2().getLocalStrategy());
-			assertEquals(new FieldList(0), iteration.getInput2().getLocalStrategyKeys());
-		}
-		catch (Exception e) {
-			System.err.println(e.getMessage());
-			e.printStackTrace();
-			fail(e.getMessage());
-		}
+		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
+		env.setParallelism(DEFAULT_PARALLELISM);
+
+		// compose test program
+		DataSet<Vertex<Long, Long>> initialVertices = env.fromElements(
+			new Tuple2<>(1L, 1L), new Tuple2<>(2L, 2L))
+			.map(new Tuple2ToVertexMap<Long, Long>());
+
+		DataSet<Edge<Long, NullValue>> edges = env.fromElements(new Tuple2<>(1L, 2L))
+			.map(new MapFunction<Tuple2<Long, Long>, Edge<Long, NullValue>>() {
+
+				public Edge<Long, NullValue> map(Tuple2<Long, Long> edge) {
+					return new Edge<>(edge.f0, edge.f1, NullValue.getInstance());
+				}
+			});
+
+		Graph<Long, Long, NullValue> graph = Graph.fromDataSet(initialVertices, edges, env);
+
+		DataSet<Vertex<Long, Long>> result = graph.runScatterGatherIteration(
+			new ConnectedComponents.CCMessenger<Long, Long>(BasicTypeInfo.LONG_TYPE_INFO),
+			new ConnectedComponents.CCUpdater<Long, Long>(), 100)
+			.getVertices();
+
+		result.output(new DiscardingOutputFormat<Vertex<Long, Long>>());
+
+		Plan p = env.createProgramPlan("Spargel Connected Components");
+		OptimizedPlan op = compileNoStats(p);
+
+		// check the sink
+		SinkPlanNode sink = op.getDataSinks().iterator().next();
+		assertEquals(ShipStrategyType.FORWARD, sink.getInput().getShipStrategy());
+		assertEquals(DEFAULT_PARALLELISM, sink.getParallelism());
+
+		// check the iteration
+		WorksetIterationPlanNode iteration = (WorksetIterationPlanNode) sink.getInput().getSource();
+		assertEquals(DEFAULT_PARALLELISM, iteration.getParallelism());
+
+		// check the solution set join and the delta
+		PlanNode ssDelta = iteration.getSolutionSetDeltaPlanNode();
+		assertTrue(ssDelta instanceof DualInputPlanNode); // this is only true if the update function preserves the partitioning
+
+		DualInputPlanNode ssJoin = (DualInputPlanNode) ssDelta;
+		assertEquals(DEFAULT_PARALLELISM, ssJoin.getParallelism());
+		assertEquals(ShipStrategyType.PARTITION_HASH, ssJoin.getInput1().getShipStrategy());
+		assertEquals(new FieldList(0), ssJoin.getInput1().getShipStrategyKeys());
+
+		// check the workset join
+		DualInputPlanNode edgeJoin = (DualInputPlanNode) ssJoin.getInput1().getSource();
+		assertEquals(DEFAULT_PARALLELISM, edgeJoin.getParallelism());
+		assertEquals(ShipStrategyType.PARTITION_HASH, edgeJoin.getInput1().getShipStrategy());
+		assertEquals(ShipStrategyType.FORWARD, edgeJoin.getInput2().getShipStrategy());
+		assertTrue(edgeJoin.getInput1().getTempMode().isCached());
+
+		assertEquals(new FieldList(0), edgeJoin.getInput1().getShipStrategyKeys());
+
+		// check that the initial partitioning is pushed out of the loop
+		assertEquals(ShipStrategyType.PARTITION_HASH, iteration.getInput1().getShipStrategy());
+		assertEquals(ShipStrategyType.PARTITION_HASH, iteration.getInput2().getShipStrategy());
+		assertEquals(new FieldList(0), iteration.getInput1().getShipStrategyKeys());
+		assertEquals(new FieldList(0), iteration.getInput2().getShipStrategyKeys());
+
+		// check that the initial workset sort is outside the loop
+		assertEquals(LocalStrategy.SORT, iteration.getInput2().getLocalStrategy());
+		assertEquals(new FieldList(0), iteration.getInput2().getLocalStrategyKeys());
 	}
-	
+
 	@SuppressWarnings("serial")
 	@Test
 	public void testSpargelCompilerWithBroadcastVariable() {
-		try {
-			final String BC_VAR_NAME = "borat variable";
-			
-			
-			ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
-			env.setParallelism(DEFAULT_PARALLELISM);
-			// compose test program
-			{
-				DataSet<Long> bcVar = env.fromElements(1L);
-
-				DataSet<Vertex<Long, Long>> initialVertices = env.fromElements(
-						new Tuple2<>(1L, 1L), new Tuple2<>(2L, 2L))
-						.map(new Tuple2ToVertexMap<Long, Long>());
-
-				DataSet<Edge<Long, NullValue>> edges = env.fromElements(new Tuple2<>(1L, 2L))
-						.map(new MapFunction<Tuple2<Long, Long>, Edge<Long, NullValue>>() {
-
-							public Edge<Long, NullValue> map(Tuple2<Long, Long> edge) {
-								return new Edge<>(edge.f0, edge.f1, NullValue.getInstance());
-							}
-					});
-
-				Graph<Long, Long, NullValue> graph = Graph.fromDataSet(initialVertices, edges, env);
-
-				ScatterGatherConfiguration parameters = new ScatterGatherConfiguration();
-				parameters.addBroadcastSetForScatterFunction(BC_VAR_NAME, bcVar);
-				parameters.addBroadcastSetForGatherFunction(BC_VAR_NAME, bcVar);
-
-				DataSet<Vertex<Long, Long>> result = graph.runScatterGatherIteration(
-						new ConnectedComponents.CCMessenger<Long, Long>(BasicTypeInfo.LONG_TYPE_INFO),
-						new ConnectedComponents.CCUpdater<Long, Long>(), 100)
-						.getVertices();
-					
-				result.output(new DiscardingOutputFormat<Vertex<Long, Long>>());
-
-			}
-			
-			Plan p = env.createProgramPlan("Spargel Connected Components");
-			OptimizedPlan op = compileNoStats(p);
-			
-			// check the sink
-			SinkPlanNode sink = op.getDataSinks().iterator().next();
-			assertEquals(ShipStrategyType.FORWARD, sink.getInput().getShipStrategy());
-			assertEquals(DEFAULT_PARALLELISM, sink.getParallelism());
-			
-			// check the iteration
-			WorksetIterationPlanNode iteration = (WorksetIterationPlanNode) sink.getInput().getSource();
-			assertEquals(DEFAULT_PARALLELISM, iteration.getParallelism());
-			
-			// check the solution set join and the delta
-			PlanNode ssDelta = iteration.getSolutionSetDeltaPlanNode();
-			assertTrue(ssDelta instanceof DualInputPlanNode); // this is only true if the update functions preserves the partitioning
-			
-			DualInputPlanNode ssJoin = (DualInputPlanNode) ssDelta;
-			assertEquals(DEFAULT_PARALLELISM, ssJoin.getParallelism());
-			assertEquals(ShipStrategyType.PARTITION_HASH, ssJoin.getInput1().getShipStrategy());
-			assertEquals(new FieldList(0), ssJoin.getInput1().getShipStrategyKeys());
-			
-			// check the workset set join
-			DualInputPlanNode edgeJoin = (DualInputPlanNode) ssJoin.getInput1().getSource();
-			assertEquals(DEFAULT_PARALLELISM, edgeJoin.getParallelism());
-			assertEquals(ShipStrategyType.PARTITION_HASH, edgeJoin.getInput1().getShipStrategy());
-			assertEquals(ShipStrategyType.FORWARD, edgeJoin.getInput2().getShipStrategy());
-			assertTrue(edgeJoin.getInput1().getTempMode().isCached());
-			
-			assertEquals(new FieldList(0), edgeJoin.getInput1().getShipStrategyKeys());
-			
-			// check that the initial partitioning is pushed out of the loop
-			assertEquals(ShipStrategyType.PARTITION_HASH, iteration.getInput1().getShipStrategy());
-			assertEquals(ShipStrategyType.PARTITION_HASH, iteration.getInput2().getShipStrategy());
-			assertEquals(new FieldList(0), iteration.getInput1().getShipStrategyKeys());
-			assertEquals(new FieldList(0), iteration.getInput2().getShipStrategyKeys());
-		}
-		catch (Exception e) {
-			System.err.println(e.getMessage());
-			e.printStackTrace();
-			fail(e.getMessage());
-		}
+		final String broadcastVariableName = "broadcast variable";
+
+		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
+		env.setParallelism(DEFAULT_PARALLELISM);
+
+		// compose test program
+
+		DataSet<Long> bcVar = env.fromElements(1L);
+
+		DataSet<Vertex<Long, Long>> initialVertices = env.fromElements(
+			new Tuple2<>(1L, 1L), new Tuple2<>(2L, 2L))
+			.map(new Tuple2ToVertexMap<Long, Long>());
+
+		DataSet<Edge<Long, NullValue>> edges = env.fromElements(new Tuple2<>(1L, 2L))
+			.map(new MapFunction<Tuple2<Long, Long>, Edge<Long, NullValue>>() {
+
+				public Edge<Long, NullValue> map(Tuple2<Long, Long> edge) {
+					return new Edge<>(edge.f0, edge.f1, NullValue.getInstance());
+				}
+			});
+
+		Graph<Long, Long, NullValue> graph = Graph.fromDataSet(initialVertices, edges, env);
+
+		ScatterGatherConfiguration parameters = new ScatterGatherConfiguration();
+		parameters.addBroadcastSetForScatterFunction(broadcastVariableName, bcVar);
+		parameters.addBroadcastSetForGatherFunction(broadcastVariableName, bcVar);
+
+		DataSet<Vertex<Long, Long>> result = graph.runScatterGatherIteration(
+			new ConnectedComponents.CCMessenger<Long, Long>(BasicTypeInfo.LONG_TYPE_INFO),
+			new ConnectedComponents.CCUpdater<Long, Long>(), 100)
+			.getVertices();
+
+		result.output(new DiscardingOutputFormat<Vertex<Long, Long>>());
+
+		Plan p = env.createProgramPlan("Spargel Connected Components");
+		OptimizedPlan op = compileNoStats(p);
+
+		// check the sink
+		SinkPlanNode sink = op.getDataSinks().iterator().next();
+		assertEquals(ShipStrategyType.FORWARD, sink.getInput().getShipStrategy());
+		assertEquals(DEFAULT_PARALLELISM, sink.getParallelism());
+
+		// check the iteration
+		WorksetIterationPlanNode iteration = (WorksetIterationPlanNode) sink.getInput().getSource();
+		assertEquals(DEFAULT_PARALLELISM, iteration.getParallelism());
+
+		// check the solution set join and the delta
+		PlanNode ssDelta = iteration.getSolutionSetDeltaPlanNode();
+		assertTrue(ssDelta instanceof DualInputPlanNode); // this is only true if the update function preserves the partitioning
+
+		DualInputPlanNode ssJoin = (DualInputPlanNode) ssDelta;
+		assertEquals(DEFAULT_PARALLELISM, ssJoin.getParallelism());
+		assertEquals(ShipStrategyType.PARTITION_HASH, ssJoin.getInput1().getShipStrategy());
+		assertEquals(new FieldList(0), ssJoin.getInput1().getShipStrategyKeys());
+
+		// check the workset join
+		DualInputPlanNode edgeJoin = (DualInputPlanNode) ssJoin.getInput1().getSource();
+		assertEquals(DEFAULT_PARALLELISM, edgeJoin.getParallelism());
+		assertEquals(ShipStrategyType.PARTITION_HASH, edgeJoin.getInput1().getShipStrategy());
+		assertEquals(ShipStrategyType.FORWARD, edgeJoin.getInput2().getShipStrategy());
+		assertTrue(edgeJoin.getInput1().getTempMode().isCached());
+
+		assertEquals(new FieldList(0), edgeJoin.getInput1().getShipStrategyKeys());
+
+		// check that the initial partitioning is pushed out of the loop
+		assertEquals(ShipStrategyType.PARTITION_HASH, iteration.getInput1().getShipStrategy());
+		assertEquals(ShipStrategyType.PARTITION_HASH, iteration.getInput2().getShipStrategy());
+		assertEquals(new FieldList(0), iteration.getInput1().getShipStrategyKeys());
+		assertEquals(new FieldList(0), iteration.getInput2().getShipStrategyKeys());
 	}
 }
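
The scatter-gather programs compiled here pair a ScatterFunction, which emits messages along the edges, with a GatherFunction, which folds the received messages into a new vertex value. A minimal pair in the spirit of the ConnectedComponents.CCMessenger/CCUpdater classes used above could be sketched as follows (class names are illustrative; the helper methods are assumed from the Gelly scatter-gather API):

	@SuppressWarnings("serial")
	private static final class MinMessenger extends ScatterFunction<Long, Long, Long, NullValue> {

		@Override
		public void sendMessages(Vertex<Long, Long> vertex) {
			// offer the current value to every neighbor
			for (Edge<Long, NullValue> edge : getEdges()) {
				sendMessageTo(edge.getTarget(), vertex.getValue());
			}
		}
	}

	@SuppressWarnings("serial")
	private static final class MinUpdater extends GatherFunction<Long, Long, Long> {

		@Override
		public void updateVertex(Vertex<Long, Long> vertex, MessageIterator<Long> messages) {
			// adopt the smallest value seen so far; a vertex that does not
			// call setNewVertexValue is treated as unchanged
			long min = vertex.getValue();
			for (Long msg : messages) {
				min = Math.min(min, msg);
			}
			if (min < vertex.getValue()) {
				setNewVertexValue(min);
			}
		}
	}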

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/spargel/SpargelTranslationTest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/spargel/SpargelTranslationTest.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/spargel/SpargelTranslationTest.java
index 47b785d..d209a2d 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/spargel/SpargelTranslationTest.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/spargel/SpargelTranslationTest.java
@@ -31,187 +31,156 @@ import org.apache.flink.api.java.tuple.Tuple3;
 import org.apache.flink.graph.Graph;
 import org.apache.flink.graph.Vertex;
 import org.apache.flink.types.NullValue;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
 
+/**
+ * Test the creation of a {@link ScatterGatherIteration} program.
+ */
 @SuppressWarnings("serial")
 public class SpargelTranslationTest {
 
-	@Test
-	public void testTranslationPlainEdges() {
-		try {
-			final String ITERATION_NAME = "Test Name";
-
-			final String AGGREGATOR_NAME = "AggregatorName";
-
-			final String BC_SET_MESSAGES_NAME = "borat messages";
+	private static final String ITERATION_NAME = "Test Name";
 
-			final String BC_SET_UPDATES_NAME = "borat updates";
+	private static final String AGGREGATOR_NAME = "AggregatorName";
 
-			final int NUM_ITERATIONS = 13;
+	private static final String BC_SET_MESSAGES_NAME = "broadcast messages";
 
-			final int ITERATION_parallelism = 77;
+	private static final String BC_SET_UPDATES_NAME = "broadcast updates";
 
+	private static final int NUM_ITERATIONS = 13;
 
-			ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
+	private static final int ITERATION_PARALLELISM = 77;
 
-			DataSet<Long> bcMessaging = env.fromElements(1L);
-			DataSet<Long> bcUpdate = env.fromElements(1L);
+	@Test
+	public void testTranslationPlainEdges() {
+		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
 
-			DataSet<Vertex<String, Double>> result;
+		DataSet<Long> bcMessaging = env.fromElements(1L);
+		DataSet<Long> bcUpdate = env.fromElements(1L);
 
-			// ------------ construct the test program ------------------
-			{
+		DataSet<Vertex<String, Double>> result;
 
-				DataSet<Tuple2<String, Double>> initialVertices = env.fromElements(new Tuple2<>("abc", 3.44));
+		// ------------ construct the test program ------------------
 
-				DataSet<Tuple2<String, String>> edges = env.fromElements(new Tuple2<>("a", "c"));
+		DataSet<Tuple2<String, Double>> initialVertices = env.fromElements(new Tuple2<>("abc", 3.44));
 
-				Graph<String, Double, NullValue> graph = Graph.fromTupleDataSet(initialVertices,
-						edges.map(new MapFunction<Tuple2<String, String>, Tuple3<String, String, NullValue>>() {
+		DataSet<Tuple2<String, String>> edges = env.fromElements(new Tuple2<>("a", "c"));
 
-							public Tuple3<String, String, NullValue> map(
-									Tuple2<String, String> edge) {
-								return new Tuple3<>(edge.f0, edge.f1, NullValue.getInstance());
-							}
-						}), env);
+		Graph<String, Double, NullValue> graph = Graph.fromTupleDataSet(initialVertices,
+			edges.map(new MapFunction<Tuple2<String, String>, Tuple3<String, String, NullValue>>() {
 
-				ScatterGatherConfiguration parameters = new ScatterGatherConfiguration();
+				public Tuple3<String, String, NullValue> map(
+					Tuple2<String, String> edge) {
+					return new Tuple3<>(edge.f0, edge.f1, NullValue.getInstance());
+				}
+			}), env);
 
-				parameters.addBroadcastSetForScatterFunction(BC_SET_MESSAGES_NAME, bcMessaging);
-				parameters.addBroadcastSetForGatherFunction(BC_SET_UPDATES_NAME, bcUpdate);
-				parameters.setName(ITERATION_NAME);
-				parameters.setParallelism(ITERATION_parallelism);
-				parameters.registerAggregator(AGGREGATOR_NAME, new LongSumAggregator());
+		ScatterGatherConfiguration parameters = new ScatterGatherConfiguration();
 
-				result = graph.runScatterGatherIteration(new MessageFunctionNoEdgeValue(), new UpdateFunction(),
-						NUM_ITERATIONS, parameters).getVertices();
+		parameters.addBroadcastSetForScatterFunction(BC_SET_MESSAGES_NAME, bcMessaging);
+		parameters.addBroadcastSetForGatherFunction(BC_SET_UPDATES_NAME, bcUpdate);
+		parameters.setName(ITERATION_NAME);
+		parameters.setParallelism(ITERATION_PARALLELISM);
+		parameters.registerAggregator(AGGREGATOR_NAME, new LongSumAggregator());
 
-				result.output(new DiscardingOutputFormat<Vertex<String, Double>>());
-			}
+		result = graph.runScatterGatherIteration(new MessageFunctionNoEdgeValue(), new UpdateFunction(),
+			NUM_ITERATIONS, parameters).getVertices();
 
+		result.output(new DiscardingOutputFormat<Vertex<String, Double>>());
 
-			// ------------- validate the java program ----------------
+		// ------------- validate the java program ----------------
 
-			assertTrue(result instanceof DeltaIterationResultSet);
+		assertTrue(result instanceof DeltaIterationResultSet);
 
-			DeltaIterationResultSet<?, ?> resultSet = (DeltaIterationResultSet<?, ?>) result;
-			DeltaIteration<?, ?> iteration = resultSet.getIterationHead();
+		DeltaIterationResultSet<?, ?> resultSet = (DeltaIterationResultSet<?, ?>) result;
+		DeltaIteration<?, ?> iteration = resultSet.getIterationHead();
 
-			// check the basic iteration properties
-			assertEquals(NUM_ITERATIONS, resultSet.getMaxIterations());
-			assertArrayEquals(new int[] {0}, resultSet.getKeyPositions());
-			assertEquals(ITERATION_parallelism, iteration.getParallelism());
-			assertEquals(ITERATION_NAME, iteration.getName());
+		// check the basic iteration properties
+		assertEquals(NUM_ITERATIONS, resultSet.getMaxIterations());
+		assertArrayEquals(new int[]{0}, resultSet.getKeyPositions());
+		assertEquals(ITERATION_PARALLELISM, iteration.getParallelism());
+		assertEquals(ITERATION_NAME, iteration.getName());
 
-			assertEquals(AGGREGATOR_NAME, iteration.getAggregators().getAllRegisteredAggregators().iterator().next().getName());
+		assertEquals(AGGREGATOR_NAME, iteration.getAggregators().getAllRegisteredAggregators().iterator().next().getName());
 
-			// validate that the semantic properties are set as they should
-			TwoInputUdfOperator<?, ?, ?, ?> solutionSetJoin = (TwoInputUdfOperator<?, ?, ?, ?>) resultSet.getNextWorkset();
-			assertTrue(solutionSetJoin.getSemanticProperties().getForwardingTargetFields(0, 0).contains(0));
-			assertTrue(solutionSetJoin.getSemanticProperties().getForwardingTargetFields(1, 0).contains(0));
+		// validate that the semantic properties are set as they should
+		TwoInputUdfOperator<?, ?, ?, ?> solutionSetJoin = (TwoInputUdfOperator<?, ?, ?, ?>) resultSet.getNextWorkset();
+		assertTrue(solutionSetJoin.getSemanticProperties().getForwardingTargetFields(0, 0).contains(0));
+		assertTrue(solutionSetJoin.getSemanticProperties().getForwardingTargetFields(1, 0).contains(0));
 
-			TwoInputUdfOperator<?, ?, ?, ?> edgesJoin = (TwoInputUdfOperator<?, ?, ?, ?>) solutionSetJoin.getInput1();
+		TwoInputUdfOperator<?, ?, ?, ?> edgesJoin = (TwoInputUdfOperator<?, ?, ?, ?>) solutionSetJoin.getInput1();
 
-			// validate that the broadcast sets are forwarded
-			assertEquals(bcUpdate, solutionSetJoin.getBroadcastSets().get(BC_SET_UPDATES_NAME));
-			assertEquals(bcMessaging, edgesJoin.getBroadcastSets().get(BC_SET_MESSAGES_NAME));
-		}
-		catch (Exception e) {
-			System.err.println(e.getMessage());
-			e.printStackTrace();
-			fail(e.getMessage());
-		}
+		// validate that the broadcast sets are forwarded
+		assertEquals(bcUpdate, solutionSetJoin.getBroadcastSets().get(BC_SET_UPDATES_NAME));
+		assertEquals(bcMessaging, edgesJoin.getBroadcastSets().get(BC_SET_MESSAGES_NAME));
 	}
 
 	@Test
 	public void testTranslationPlainEdgesWithForkedBroadcastVariable() {
-		try {
-			final String ITERATION_NAME = "Test Name";
-
-			final String AGGREGATOR_NAME = "AggregatorName";
+		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
 
-			final String BC_SET_MESSAGES_NAME = "borat messages";
+		DataSet<Long> bcVar = env.fromElements(1L);
 
-			final String BC_SET_UPDATES_NAME = "borat updates";
+		DataSet<Vertex<String, Double>> result;
 
-			final int NUM_ITERATIONS = 13;
+		// ------------ construct the test program ------------------
 
-			final int ITERATION_parallelism = 77;
+		DataSet<Tuple2<String, Double>> initialVertices = env.fromElements(new Tuple2<>("abc", 3.44));
 
+		DataSet<Tuple2<String, String>> edges = env.fromElements(new Tuple2<>("a", "c"));
 
-			ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
+		Graph<String, Double, NullValue> graph = Graph.fromTupleDataSet(initialVertices,
+			edges.map(new MapFunction<Tuple2<String, String>, Tuple3<String, String, NullValue>>() {
 
-			DataSet<Long> bcVar = env.fromElements(1L);
+				public Tuple3<String, String, NullValue> map(
+					Tuple2<String, String> edge) {
+					return new Tuple3<>(edge.f0, edge.f1, NullValue.getInstance());
+				}
+			}), env);
 
-			DataSet<Vertex<String, Double>> result;
+		ScatterGatherConfiguration parameters = new ScatterGatherConfiguration();
 
-			// ------------ construct the test program ------------------
-			{
+		parameters.addBroadcastSetForScatterFunction(BC_SET_MESSAGES_NAME, bcVar);
+		parameters.addBroadcastSetForGatherFunction(BC_SET_UPDATES_NAME, bcVar);
+		parameters.setName(ITERATION_NAME);
+		parameters.setParallelism(ITERATION_PARALLELISM);
+		parameters.registerAggregator(AGGREGATOR_NAME, new LongSumAggregator());
 
-				DataSet<Tuple2<String, Double>> initialVertices = env.fromElements(new Tuple2<>("abc", 3.44));
+		result = graph.runScatterGatherIteration(new MessageFunctionNoEdgeValue(), new UpdateFunction(),
+			NUM_ITERATIONS, parameters).getVertices();
 
-				DataSet<Tuple2<String, String>> edges = env.fromElements(new Tuple2<>("a", "c"));
+		result.output(new DiscardingOutputFormat<Vertex<String, Double>>());
 
-				Graph<String, Double, NullValue> graph = Graph.fromTupleDataSet(initialVertices,
-						edges.map(new MapFunction<Tuple2<String, String>, Tuple3<String, String, NullValue>>() {
+		// ------------- validate the java program ----------------
 
-							public Tuple3<String, String, NullValue> map(
-									Tuple2<String, String> edge) {
-								return new Tuple3<>(edge.f0, edge.f1, NullValue.getInstance());
-							}
-						}), env);
+		assertTrue(result instanceof DeltaIterationResultSet);
 
-				ScatterGatherConfiguration parameters = new ScatterGatherConfiguration();
+		DeltaIterationResultSet<?, ?> resultSet = (DeltaIterationResultSet<?, ?>) result;
+		DeltaIteration<?, ?> iteration = resultSet.getIterationHead();
 
-				parameters.addBroadcastSetForScatterFunction(BC_SET_MESSAGES_NAME, bcVar);
-				parameters.addBroadcastSetForGatherFunction(BC_SET_UPDATES_NAME, bcVar);
-				parameters.setName(ITERATION_NAME);
-				parameters.setParallelism(ITERATION_parallelism);
-				parameters.registerAggregator(AGGREGATOR_NAME, new LongSumAggregator());
+		// check the basic iteration properties
+		assertEquals(NUM_ITERATIONS, resultSet.getMaxIterations());
+		assertArrayEquals(new int[]{0}, resultSet.getKeyPositions());
+		assertEquals(ITERATION_PARALLELISM, iteration.getParallelism());
+		assertEquals(ITERATION_NAME, iteration.getName());
 
-				result = graph.runScatterGatherIteration(new MessageFunctionNoEdgeValue(), new UpdateFunction(),
-						NUM_ITERATIONS, parameters).getVertices();
+		assertEquals(AGGREGATOR_NAME, iteration.getAggregators().getAllRegisteredAggregators().iterator().next().getName());
 
-				result.output(new DiscardingOutputFormat<Vertex<String, Double>>());
-			}
+		// validate that the semantic properties are set as they should
+		TwoInputUdfOperator<?, ?, ?, ?> solutionSetJoin = (TwoInputUdfOperator<?, ?, ?, ?>) resultSet.getNextWorkset();
+		assertTrue(solutionSetJoin.getSemanticProperties().getForwardingTargetFields(0, 0).contains(0));
+		assertTrue(solutionSetJoin.getSemanticProperties().getForwardingTargetFields(1, 0).contains(0));
 
+		TwoInputUdfOperator<?, ?, ?, ?> edgesJoin = (TwoInputUdfOperator<?, ?, ?, ?>) solutionSetJoin.getInput1();
 
-			// ------------- validate the java program ----------------
-
-			assertTrue(result instanceof DeltaIterationResultSet);
-
-			DeltaIterationResultSet<?, ?> resultSet = (DeltaIterationResultSet<?, ?>) result;
-			DeltaIteration<?, ?> iteration = resultSet.getIterationHead();
-
-			// check the basic iteration properties
-			assertEquals(NUM_ITERATIONS, resultSet.getMaxIterations());
-			assertArrayEquals(new int[] {0}, resultSet.getKeyPositions());
-			assertEquals(ITERATION_parallelism, iteration.getParallelism());
-			assertEquals(ITERATION_NAME, iteration.getName());
-
-			assertEquals(AGGREGATOR_NAME, iteration.getAggregators().getAllRegisteredAggregators().iterator().next().getName());
-
-			// validate that the semantic properties are set as they should
-			TwoInputUdfOperator<?, ?, ?, ?> solutionSetJoin = (TwoInputUdfOperator<?, ?, ?, ?>) resultSet.getNextWorkset();
-			assertTrue(solutionSetJoin.getSemanticProperties().getForwardingTargetFields(0, 0).contains(0));
-			assertTrue(solutionSetJoin.getSemanticProperties().getForwardingTargetFields(1, 0).contains(0));
-
-			TwoInputUdfOperator<?, ?, ?, ?> edgesJoin = (TwoInputUdfOperator<?, ?, ?, ?>) solutionSetJoin.getInput1();
-
-			// validate that the broadcast sets are forwarded
-			assertEquals(bcVar, solutionSetJoin.getBroadcastSets().get(BC_SET_UPDATES_NAME));
-			assertEquals(bcVar, edgesJoin.getBroadcastSets().get(BC_SET_MESSAGES_NAME));
-		}
-		catch (Exception e) {
-			System.err.println(e.getMessage());
-			e.printStackTrace();
-			fail(e.getMessage());
-		}
+		// validate that the broadcast sets are forwarded
+		assertEquals(bcVar, solutionSetJoin.getBroadcastSets().get(BC_SET_UPDATES_NAME));
+		assertEquals(bcVar, edgesJoin.getBroadcastSets().get(BC_SET_MESSAGES_NAME));
 	}
 
 	// --------------------------------------------------------------------------------------------
@@ -219,12 +188,14 @@ public class SpargelTranslationTest {
 	private static class MessageFunctionNoEdgeValue extends ScatterFunction<String, Double, Long, NullValue> {
 
 		@Override
-		public void sendMessages(Vertex<String, Double> vertex) {}
+		public void sendMessages(Vertex<String, Double> vertex) {
+		}
 	}
 
 	private static class UpdateFunction extends GatherFunction<String, Double, Long> {
 
 		@Override
-		public void updateVertex(Vertex<String, Double> vertex, MessageIterator<Long> inMessages) {}
+		public void updateVertex(Vertex<String, Double> vertex, MessageIterator<Long> inMessages) {
+		}
 	}
 }
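
The "forked broadcast variable" case above registers a single DataSet twice, once for the scatter side and once for the gather side, and then checks that each join inside the iteration receives it under its own name. The registration step is simply:

	DataSet<Long> bcVar = env.fromElements(1L);

	ScatterGatherConfiguration parameters = new ScatterGatherConfiguration();
	// one data set, visible on both sides of the iteration under different names
	parameters.addBroadcastSetForScatterFunction(BC_SET_MESSAGES_NAME, bcVar);
	parameters.addBroadcastSetForGatherFunction(BC_SET_UPDATES_NAME, bcVar);

Inside the scatter and gather functions the sets are read back with getBroadcastSet(name), as the configuration ITCases further below demonstrate.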

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/CollectionModeSuperstepITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/CollectionModeSuperstepITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/CollectionModeSuperstepITCase.java
index 78f0ba2..2454b38 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/CollectionModeSuperstepITCase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/CollectionModeSuperstepITCase.java
@@ -29,18 +29,19 @@ import org.apache.flink.graph.spargel.MessageIterator;
 import org.apache.flink.graph.spargel.ScatterFunction;
 import org.apache.flink.graph.utils.VertexToTuple2Map;
 import org.apache.flink.util.TestLogger;
+
 import org.junit.Assert;
 import org.junit.Test;
 
+/**
+ * Dummy iteration to test that the superstep number is correctly incremented
+ * and can be retrieved from inside the scatter and gather functions.
+ * All vertices start with value 1 and increase their value by 1
+ * in each iteration.
+ */
 @SuppressWarnings("serial")
 public class CollectionModeSuperstepITCase extends TestLogger {
 
-	/**
-	 * Dummy iteration to test that the supersteps are correctly incremented
-	 * and can be retrieved from inside the scatter and gather functions.
-	 * All vertices start with value 1 and increase their value by 1
-	 * in each iteration. 
-	 */
 	@Test
 	public void testProgram() throws Exception {
 		ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();
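
Per the class comment, every vertex starts at 1 and is incremented once per superstep, which lets the test cross-check getSuperstepNumber() against the vertex value. A gather function implementing that check could be sketched as follows (IncrementUpdater is an illustrative name):

	@SuppressWarnings("serial")
	private static final class IncrementUpdater extends GatherFunction<Long, Long, Long> {

		@Override
		public void updateVertex(Vertex<Long, Long> vertex, MessageIterator<Long> messages) {
			// in superstep n each vertex still holds the value n
			Assert.assertEquals(getSuperstepNumber(), vertex.getValue().longValue());
			setNewVertexValue(vertex.getValue() + 1);
		}
	}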

http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/GatherSumApplyConfigurationITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/GatherSumApplyConfigurationITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/GatherSumApplyConfigurationITCase.java
index 183522d..f866f38 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/GatherSumApplyConfigurationITCase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/GatherSumApplyConfigurationITCase.java
@@ -34,6 +34,7 @@ import org.apache.flink.graph.gsa.Neighbor;
 import org.apache.flink.graph.gsa.SumFunction;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
 import org.apache.flink.types.LongValue;
+
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -42,6 +43,9 @@ import org.junit.runners.Parameterized;
 import java.util.HashSet;
 import java.util.List;
 
+/**
+ * Tests for {@link GSAConfiguration}.
+ */
 @RunWith(Parameterized.class)
 public class GatherSumApplyConfigurationITCase extends MultipleProgramsTestBase {
 
@@ -235,7 +239,7 @@ public class GatherSumApplyConfigurationITCase extends MultipleProgramsTestBase
 
 			// test bcast variable
 			@SuppressWarnings("unchecked")
-			List<Integer> bcastSet = (List<Integer>)(List<?>)getBroadcastSet("gatherBcastSet");
+			List<Integer> bcastSet = (List<Integer>) (List<?>) getBroadcastSet("gatherBcastSet");
 			Assert.assertEquals(1, bcastSet.get(0).intValue());
 			Assert.assertEquals(2, bcastSet.get(1).intValue());
 			Assert.assertEquals(3, bcastSet.get(2).intValue());
@@ -266,7 +270,7 @@ public class GatherSumApplyConfigurationITCase extends MultipleProgramsTestBase
 
 			// test bcast variable
 			@SuppressWarnings("unchecked")
-			List<Integer> bcastSet = (List<Integer>)(List<?>)getBroadcastSet("sumBcastSet");
+			List<Integer> bcastSet = (List<Integer>) (List<?>) getBroadcastSet("sumBcastSet");
 			Assert.assertEquals(4, bcastSet.get(0).intValue());
 			Assert.assertEquals(5, bcastSet.get(1).intValue());
 			Assert.assertEquals(6, bcastSet.get(2).intValue());
@@ -295,7 +299,7 @@ public class GatherSumApplyConfigurationITCase extends MultipleProgramsTestBase
 
 			// test bcast variable
 			@SuppressWarnings("unchecked")
-			List<Integer> bcastSet = (List<Integer>)(List<?>)getBroadcastSet("applyBcastSet");
+			List<Integer> bcastSet = (List<Integer>) (List<?>) getBroadcastSet("applyBcastSet");
 			Assert.assertEquals(7, bcastSet.get(0).intValue());
 			Assert.assertEquals(8, bcastSet.get(1).intValue());
 			Assert.assertEquals(9, bcastSet.get(2).intValue());
@@ -346,7 +350,7 @@ public class GatherSumApplyConfigurationITCase extends MultipleProgramsTestBase
 	}
 
 	@SuppressWarnings("serial")
-	public static final class AssignOneMapper implements MapFunction<Vertex<Long, Long>, Long> {
+	private static final class AssignOneMapper implements MapFunction<Vertex<Long, Long>, Long> {
 
 		public Long map(Vertex<Long, Long> value) {
 			return 1L;
@@ -354,7 +358,7 @@ public class GatherSumApplyConfigurationITCase extends MultipleProgramsTestBase
 	}
 
 	@SuppressWarnings("serial")
-	public static final class InitialiseHashSetMapper implements MapFunction<Vertex<Long, Long>, HashSet<Long>> {
+	private static final class InitialiseHashSetMapper implements MapFunction<Vertex<Long, Long>, HashSet<Long>> {
 
 		@Override
 		public HashSet<Long> map(Vertex<Long, Long> value) throws Exception {

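The broadcast sets whose casts the hunks above reformat are registered on a GSAConfiguration before the iteration runs, one per phase. A minimal sketch follows; the set names and values mirror the assertions above, while the surrounding setup is hypothetical:

    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.graph.gsa.GSAConfiguration;

    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    GSAConfiguration parameters = new GSAConfiguration();

    // one broadcast set per GSA phase, read back inside the respective
    // function with getBroadcastSet(name)
    parameters.addBroadcastSetForGatherFunction("gatherBcastSet", env.fromElements(1, 2, 3));
    parameters.addBroadcastSetForSumFunction("sumBcastSet", env.fromElements(4, 5, 6));
    parameters.addBroadcastSetForApplyFunction("applyBcastSet", env.fromElements(7, 8, 9));

    // when reading the set back, strict checkstyle mandates a space after
    // each cast, which is all these hunks change:
    //   List<Integer> bcastSet = (List<Integer>) (List<?>) getBroadcastSet("gatherBcastSet");
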
http://git-wip-us.apache.org/repos/asf/flink/blob/d313ac76/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/ScatterGatherConfigurationITCase.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/ScatterGatherConfigurationITCase.java b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/ScatterGatherConfigurationITCase.java
index 3c091a9..139ff1e 100644
--- a/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/ScatterGatherConfigurationITCase.java
+++ b/flink-libraries/flink-gelly/src/test/java/org/apache/flink/graph/test/ScatterGatherConfigurationITCase.java
@@ -35,6 +35,7 @@ import org.apache.flink.graph.spargel.ScatterGatherIteration;
 import org.apache.flink.graph.utils.VertexToTuple2Map;
 import org.apache.flink.test.util.MultipleProgramsTestBase;
 import org.apache.flink.types.LongValue;
+
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -43,6 +44,9 @@ import org.junit.runners.Parameterized;
 import java.util.HashSet;
 import java.util.List;
 
+/**
+ * Tests for {@link ScatterGatherConfiguration}.
+ */
 @RunWith(Parameterized.class)
 public class ScatterGatherConfigurationITCase extends MultipleProgramsTestBase {
 
@@ -133,7 +137,6 @@ public class ScatterGatherConfigurationITCase extends MultipleProgramsTestBase {
 		Graph<Long, Long, Long> res = graph.runScatterGatherIteration(
 			new MessageFunctionDefault(), new UpdateFunctionDefault(), 5);
 
-
 		DataSet<Tuple2<Long, Long>> data = res.getVertices().map(new VertexToTuple2Map<Long, Long>());
 		List<Tuple2<Long, Long>> result = data.collect();
 
@@ -333,7 +336,6 @@ public class ScatterGatherConfigurationITCase extends MultipleProgramsTestBase {
 		compareResultAsTuples(result, expectedResult);
 	}
 
-
 	@Test
 	public void testNumVerticesNotSet() throws Exception {
 
@@ -508,7 +510,7 @@ public class ScatterGatherConfigurationITCase extends MultipleProgramsTestBase {
 
 			// test bcast variable
 			@SuppressWarnings("unchecked")
-			List<Integer> bcastSet = (List<Integer>)(List<?>)getBroadcastSet("messagingBcastSet");
+			List<Integer> bcastSet = (List<Integer>) (List<?>) getBroadcastSet("messagingBcastSet");
 			Assert.assertEquals(4, bcastSet.get(0).intValue());
 			Assert.assertEquals(5, bcastSet.get(1).intValue());
 			Assert.assertEquals(6, bcastSet.get(2).intValue());
@@ -556,7 +558,7 @@ public class ScatterGatherConfigurationITCase extends MultipleProgramsTestBase {
 
 			// test bcast variable
 			@SuppressWarnings("unchecked")
-			List<Integer> bcastSet = (List<Integer>)(List<?>)getBroadcastSet("updateBcastSet");
+			List<Integer> bcastSet = (List<Integer>) (List<?>) getBroadcastSet("updateBcastSet");
 			Assert.assertEquals(1, bcastSet.get(0).intValue());
 			Assert.assertEquals(2, bcastSet.get(1).intValue());
 			Assert.assertEquals(3, bcastSet.get(2).intValue());
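
As with the GSA variant, the scatter-gather broadcast sets asserted in these hunks are registered on the iteration configuration. A hedged sketch, assuming ScatterGatherConfiguration exposes per-phase registration methods analogous to GSAConfiguration's (the set names are kept from the test, where the scatter/messaging side reads "messagingBcastSet" and the gather/update side reads "updateBcastSet"):

    import org.apache.flink.api.java.ExecutionEnvironment;
    import org.apache.flink.graph.spargel.ScatterGatherConfiguration;

    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    ScatterGatherConfiguration parameters = new ScatterGatherConfiguration();

    // each side retrieves its set inside the function via getBroadcastSet(name)
    parameters.addBroadcastSetForScatterFunction("messagingBcastSet", env.fromElements(4, 5, 6));
    parameters.addBroadcastSetForGatherFunction("updateBcastSet", env.fromElements(1, 2, 3));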