Posted to commits@flink.apache.org by gr...@apache.org on 2016/12/23 16:17:09 UTC

flink git commit: [hotfix] Minutiae

Repository: flink
Updated Branches:
  refs/heads/master d163f8416 -> 5b4e3d889


[hotfix] Minutiae


Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/5b4e3d88
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/5b4e3d88
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/5b4e3d88

Branch: refs/heads/master
Commit: 5b4e3d889384850d05665f9266d53cc7a6c032f5
Parents: d163f84
Author: Greg Hogan <co...@greghogan.com>
Authored: Tue Aug 23 09:22:23 2016 -0400
Committer: Greg Hogan <co...@greghogan.com>
Committed: Fri Dec 23 11:13:13 2016 -0500

----------------------------------------------------------------------
 docs/setup/config.md                            |  4 ++--
 .../api/common/functions/CrossFunction.java     |  8 ++++----
 .../api/common/typeutils/TypeComparator.java    | 20 +++++++++-----------
 .../typeutils/runtime/EitherSerializer.java     | 14 +++++++-------
 .../main/java/org/apache/flink/graph/Edge.java  |  2 +-
 .../python/api/flink/plan/OperationInfo.py      |  2 +-
 .../main/resources/archetype-resources/pom.xml  |  2 +-
 .../task/IterationIntermediateTask.java         |  2 +-
 .../flink/runtime/metrics/util/MetricUtils.java |  2 +-
 .../flink/runtime/operators/CrossDriver.java    |  2 +-
 .../flink/runtime/operators/FlatMapDriver.java  |  3 ++-
 .../flink/yarn/cli/FlinkYarnSessionCli.java     |  2 +-
 tools/deploy_to_maven.sh                        |  2 +-
 13 files changed, 32 insertions(+), 33 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/5b4e3d88/docs/setup/config.md
----------------------------------------------------------------------
diff --git a/docs/setup/config.md b/docs/setup/config.md
index 5c13e43..ecb8c25 100644
--- a/docs/setup/config.md
+++ b/docs/setup/config.md
@@ -130,7 +130,7 @@ For Kafka and ZK, process-wide JAAS config will be created using the provided se
 
 ### Other
 
-- `taskmanager.tmp.dirs`: The directory for temporary files, or a list of directories separated by the systems directory delimiter (for example ':' (colon) on Linux/Unix). If multiple directories are specified, then the temporary files will be distributed across the directories in a round-robin fashion. The I/O manager component will spawn one reading and one writing thread per directory. A directory may be listed multiple times to have the I/O manager use multiple threads for it (for example if it is physically stored on a very fast disc or RAID) (DEFAULT: The system's tmp dir).
+- `taskmanager.tmp.dirs`: The directory for temporary files, or a list of directories separated by the system's directory delimiter (for example ':' (colon) on Linux/Unix). If multiple directories are specified, then the temporary files will be distributed across the directories in a round-robin fashion. The I/O manager component will spawn one reading and one writing thread per directory. A directory may be listed multiple times to have the I/O manager use multiple threads for it (for example if it is physically stored on a very fast disc or RAID) (DEFAULT: The system's tmp dir).
 
 - `taskmanager.log.path`: The config parameter defining the taskmanager log file location
 
@@ -224,7 +224,7 @@ The following parameters configure Flink's JobManager and TaskManagers.
 
 - `taskmanager.numberOfTaskSlots`: The number of parallel operator or user function instances that a single TaskManager can run (DEFAULT: **1**). If this value is larger than 1, a single TaskManager takes multiple instances of a function or operator. That way, the TaskManager can utilize multiple CPU cores, but at the same time, the available memory is divided between the different operator or function instances. This value is typically proportional to the number of physical CPU cores that the TaskManager's machine has (e.g., equal to the number of cores, or half the number of cores).
 
-- `taskmanager.tmp.dirs`: The directory for temporary files, or a list of directories separated by the systems directory delimiter (for example ':' (colon) on Linux/Unix). If multiple directories are specified, then the temporary files will be distributed across the directories in a round robin fashion. The I/O manager component will spawn one reading and one writing thread per directory. A directory may be listed multiple times to have the I/O manager use multiple threads for it (for example if it is physically stored on a very fast disc or RAID) (DEFAULT: **The system's tmp dir**).
+- `taskmanager.tmp.dirs`: The directory for temporary files, or a list of directories separated by the system's directory delimiter (for example ':' (colon) on Linux/Unix). If multiple directories are specified, then the temporary files will be distributed across the directories in a round robin fashion. The I/O manager component will spawn one reading and one writing thread per directory. A directory may be listed multiple times to have the I/O manager use multiple threads for it (for example if it is physically stored on a very fast disc or RAID) (DEFAULT: **The system's tmp dir**).
 
 - `taskmanager.network.numberOfBuffers`: The number of buffers available to the network stack. This number determines how many streaming data exchange channels a TaskManager can have at the same time and how well buffered the channels are. If a job is rejected or you get a warning that the system has not enough buffers available, increase this value (DEFAULT: **2048**).
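
As a rough illustration of the settings described above (a sketch, not part of this diff), the same values can also be supplied programmatically through Flink's Configuration class; the directory paths and slot count below are hypothetical:

    import org.apache.flink.configuration.Configuration;

    public class TaskManagerConfigSketch {
        public static void main(String[] args) {
            Configuration config = new Configuration();
            // Two hypothetical temp directories separated by the Unix delimiter ':';
            // the I/O manager spawns one reading and one writing thread per directory.
            config.setString("taskmanager.tmp.dirs", "/mnt/disk1/tmp:/mnt/disk2/tmp");
            // One slot per physical core is a common starting point.
            config.setInteger("taskmanager.numberOfTaskSlots", 4);
        }
    }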
 

http://git-wip-us.apache.org/repos/asf/flink/blob/5b4e3d88/flink-core/src/main/java/org/apache/flink/api/common/functions/CrossFunction.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/functions/CrossFunction.java b/flink-core/src/main/java/org/apache/flink/api/common/functions/CrossFunction.java
index d264e02..e29242f 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/functions/CrossFunction.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/functions/CrossFunction.java
@@ -23,11 +23,11 @@ import org.apache.flink.annotation.Public;
 import java.io.Serializable;
 
 /**
- * Interface for Cross functions. Cross functions are applied to the Cartesian produce of their inputs
- * and call are called for each pair of elements.
+ * Interface for Cross functions. Cross functions are applied to the Cartesian product
+ * of their inputs and are called for each pair of elements.
  * 
- * They are optional, a means of convenience the can be used to directly produce manipulate the
- * pair of elements, instead of processing 2-tuples that contain the pairs.
+ * They are optional, a means of convenience that can be used to directly manipulate the
+ * pair of elements instead of producing 2-tuples containing the pairs.
  * <p>
  * The basic syntax for using Cross on two data sets is as follows:
  * <pre>{@code
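
As a minimal sketch of the interface documented above (not part of this commit), a CrossFunction is invoked once per pair of the Cartesian product; the class name and element types below are illustrative:

    import org.apache.flink.api.common.functions.CrossFunction;
    import org.apache.flink.api.java.tuple.Tuple2;

    // Called for each pair (i, s) drawn from the Cartesian product of the two inputs.
    public class PairingCross implements CrossFunction<Integer, String, Tuple2<Integer, String>> {
        @Override
        public Tuple2<Integer, String> cross(Integer i, String s) {
            return new Tuple2<>(i, s);
        }
    }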

http://git-wip-us.apache.org/repos/asf/flink/blob/5b4e3d88/flink-core/src/main/java/org/apache/flink/api/common/typeutils/TypeComparator.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/typeutils/TypeComparator.java b/flink-core/src/main/java/org/apache/flink/api/common/typeutils/TypeComparator.java
index afacb9f..5c0fa7d 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/typeutils/TypeComparator.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/typeutils/TypeComparator.java
@@ -18,14 +18,14 @@
 
 package org.apache.flink.api.common.typeutils;
 
-import java.io.IOException;
-import java.io.Serializable;
-
 import org.apache.flink.annotation.PublicEvolving;
 import org.apache.flink.core.memory.DataInputView;
 import org.apache.flink.core.memory.DataOutputView;
 import org.apache.flink.core.memory.MemorySegment;
 
+import java.io.IOException;
+import java.io.Serializable;
+
 /**
  * This interface describes the methods that are required for a data type to be handled by the pact
  * runtime. Specifically, this interface contains the methods used for hashing, comparing, and creating
@@ -40,7 +40,7 @@ import org.apache.flink.core.memory.MemorySegment;
  * comparisons and later comparing a candidate against it. Therefore, the classes implementing this interface are
  * not thread safe. The runtime will ensure that no instance is used twice in different threads, but will create
  * a copy for that purpose. It is hence imperative that the copies created by the {@link #duplicate()} method
- * share no state with the instance from which they were copied: They have to be deep copies.  
+ * share no state with the instance from which they were copied: they have to be deep copies.
  *
  * @see java.lang.Object#hashCode()
  * @see java.lang.Object#equals(Object)
@@ -146,17 +146,15 @@ public abstract class TypeComparator<T> implements Serializable {
 	 */
 	public abstract int compareToReference(TypeComparator<T> referencedComparator);
 
-	// These are two special case methods that the runtime uses for special "PactRecord" support
+	// A special case method that the runtime uses for special "PactRecord" support
 	public boolean supportsCompareAgainstReference() {
 		return false;
 	}
 	
 	/**
-	 * Compares two records in serialized from. The return value indicates the order of the two in the same way
+	 * Compares two records in object form. The return value indicates the order of the two in the same way
 	 * as defined by {@link java.util.Comparator#compare(Object, Object)}.
-	 * <p>
-	 * This method may de-serialize the records or compare them directly based on their binary representation. 
-	 * 
+	 *
 	 * @param first The first record.
 	 * @param second The second record.
 	 * @return An integer defining the oder among the objects in the same way as {@link java.util.Comparator#compare(Object, Object)}.
@@ -166,7 +164,7 @@ public abstract class TypeComparator<T> implements Serializable {
 	public abstract int compare(T first, T second);
 	
 	/**
-	 * Compares two records in serialized from. The return value indicates the order of the two in the same way
+	 * Compares two records in serialized form. The return value indicates the order of the two in the same way
 	 * as defined by {@link java.util.Comparator#compare(Object, Object)}.
 	 * <p>
 	 * This method may de-serialize the records or compare them directly based on their binary representation. 
@@ -218,7 +216,7 @@ public abstract class TypeComparator<T> implements Serializable {
 	
 	/**
 	 * Writes a normalized key for the given record into the target byte array, starting at the specified position
-	 * an writing exactly the given number of bytes. Note that the comparison of the bytes is treating the bytes
+	 * and writing exactly the given number of bytes. Note that the comparison of the bytes is treating the bytes
 	 * as unsigned bytes: {@code int byteI = bytes[i] & 0xFF;}
 	 * <p>
 	 * If the meaningful part of the normalized key takes less than the given number of bytes, than it must be padded.
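
A small sketch (not part of this diff) of the unsigned-byte convention the normalized-key Javadoc above refers to; the helper name is hypothetical:

    // Compares two normalized keys byte by byte, treating each byte as unsigned,
    // which matches the ordering convention described above.
    static int compareNormalizedKeys(byte[] first, byte[] second, int numBytes) {
        for (int i = 0; i < numBytes; i++) {
            int a = first[i] & 0xFF;
            int b = second[i] & 0xFF;
            if (a != b) {
                return a - b;
            }
        }
        return 0;
    }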

http://git-wip-us.apache.org/repos/asf/flink/blob/5b4e3d88/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/EitherSerializer.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/EitherSerializer.java b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/EitherSerializer.java
index 7c9676b..e5d9070 100644
--- a/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/EitherSerializer.java
+++ b/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/EitherSerializer.java
@@ -18,18 +18,18 @@
 
 package org.apache.flink.api.java.typeutils.runtime;
 
-import static org.apache.flink.types.Either.Left;
-import static org.apache.flink.types.Either.Right;
-
-import java.io.IOException;
-
 import org.apache.flink.api.common.typeutils.TypeSerializer;
-import org.apache.flink.types.Either;
 import org.apache.flink.core.memory.DataInputView;
 import org.apache.flink.core.memory.DataOutputView;
+import org.apache.flink.types.Either;
+
+import java.io.IOException;
+
+import static org.apache.flink.types.Either.Left;
+import static org.apache.flink.types.Either.Right;
 
 /**
- * A {@link TypeSerializer} for the {@ link Either} type of the Java class.
+ * A {@link TypeSerializer} for the {@link Either} type of the Java class.
  *
  * @param <L> the Left value type
  * @param <R> the Right value type
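
For reference (not part of this commit), a short sketch of the Either type this serializer handles; the values are arbitrary:

    import org.apache.flink.types.Either;

    public class EitherSketch {
        public static void main(String[] args) {
            Either<String, Integer> failure = Either.Left("parse error");
            Either<String, Integer> success = Either.Right(42);
            // isLeft()/isRight() distinguish the cases; left()/right() unwrap them.
            System.out.println(success.isRight() ? success.right() : failure.left());
        }
    }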

http://git-wip-us.apache.org/repos/asf/flink/blob/5b4e3d88/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Edge.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Edge.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Edge.java
index 8e5f916..28d55bd 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Edge.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/Edge.java
@@ -28,7 +28,7 @@ import org.apache.flink.api.java.tuple.Tuple3;
  * @param <K> the key type for the sources and target vertices
  * @param <V> the edge value type
  */
-public class Edge<K, V> extends Tuple3<K, K, V>{
+public class Edge<K, V> extends Tuple3<K, K, V> {
 
 	private static final long serialVersionUID = 1L;
 

http://git-wip-us.apache.org/repos/asf/flink/blob/5b4e3d88/flink-libraries/flink-python/src/main/python/org/apache/flink/python/api/flink/plan/OperationInfo.py
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-python/src/main/python/org/apache/flink/python/api/flink/plan/OperationInfo.py b/flink-libraries/flink-python/src/main/python/org/apache/flink/python/api/flink/plan/OperationInfo.py
index fcda712..130256a 100644
--- a/flink-libraries/flink-python/src/main/python/org/apache/flink/python/api/flink/plan/OperationInfo.py
+++ b/flink-libraries/flink-python/src/main/python/org/apache/flink/python/api/flink/plan/OperationInfo.py
@@ -7,7 +7,7 @@
 #  "License"); you may not use this file except in compliance
 #  with the License.  You may obtain a copy of the License at
 #
-#      http://www.apache.org/licenses/LICENSE2.0
+#      http://www.apache.org/licenses/LICENSE-2.0
 #
 #  Unless required by applicable law or agreed to in writing, software
 #  distributed under the License is distributed on an "AS IS" BASIS,

http://git-wip-us.apache.org/repos/asf/flink/blob/5b4e3d88/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/pom.xml
----------------------------------------------------------------------
diff --git a/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/pom.xml b/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/pom.xml
index 9d83ebd..6ae8f16 100644
--- a/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/pom.xml
+++ b/flink-quickstart/flink-quickstart-java/src/main/resources/archetype-resources/pom.xml
@@ -177,7 +177,7 @@ under the License.
 	<build>
 		<plugins>
 			<!-- We use the maven-shade plugin to create a fat jar that contains all dependencies
-			except flink and it's transitive dependencies. The resulting fat-jar can be executed
+			except flink and its transitive dependencies. The resulting fat-jar can be executed
 			on a cluster. Change the value of Program-Class if your program entry point changes. -->
 			<plugin>
 				<groupId>org.apache.maven.plugins</groupId>

http://git-wip-us.apache.org/repos/asf/flink/blob/5b4e3d88/flink-runtime/src/main/java/org/apache/flink/runtime/iterative/task/IterationIntermediateTask.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/iterative/task/IterationIntermediateTask.java b/flink-runtime/src/main/java/org/apache/flink/runtime/iterative/task/IterationIntermediateTask.java
index cb77300..41f8e31 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/iterative/task/IterationIntermediateTask.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/iterative/task/IterationIntermediateTask.java
@@ -39,7 +39,7 @@ import java.io.IOException;
  * intermediate tasks can also update the iteration state, either the workset or the solution set.
  * <p>
  * If the iteration state is updated, the output of this task will be send back to the {@link IterationHeadTask} via
- * a {@link BlockingBackChannel} for the workset -XOR- a eHashTable for the solution set. In this case
+ * a {@link BlockingBackChannel} for the workset -XOR- a HashTable for the solution set. In this case
  * this task must be scheduled on the same instance as the head.
  */
 public class IterationIntermediateTask<S extends Function, OT> extends AbstractIterativeTask<S, OT> {

http://git-wip-us.apache.org/repos/asf/flink/blob/5b4e3d88/flink-runtime/src/main/java/org/apache/flink/runtime/metrics/util/MetricUtils.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/metrics/util/MetricUtils.java b/flink-runtime/src/main/java/org/apache/flink/runtime/metrics/util/MetricUtils.java
index a10dc3b..4612eaf 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/metrics/util/MetricUtils.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/metrics/util/MetricUtils.java
@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- *     http//www.apache.org/licenses/LICENSE-2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,

http://git-wip-us.apache.org/repos/asf/flink/blob/5b4e3d88/flink-runtime/src/main/java/org/apache/flink/runtime/operators/CrossDriver.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/operators/CrossDriver.java b/flink-runtime/src/main/java/org/apache/flink/runtime/operators/CrossDriver.java
index c3f3958..0a887a1 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/operators/CrossDriver.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/operators/CrossDriver.java
@@ -57,7 +57,7 @@ public class CrossDriver<T1, T2, OT> implements Driver<CrossFunction<T1, T2, OT>
 	
 	private BlockResettableMutableObjectIterator<?> blockIter;
 	
-	private int  memPagesForBlockSide;
+	private int memPagesForBlockSide;
 	
 	private int memPagesForSpillingSide;
 

http://git-wip-us.apache.org/repos/asf/flink/blob/5b4e3d88/flink-runtime/src/main/java/org/apache/flink/runtime/operators/FlatMapDriver.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/operators/FlatMapDriver.java b/flink-runtime/src/main/java/org/apache/flink/runtime/operators/FlatMapDriver.java
index 1a8f813..526c270 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/operators/FlatMapDriver.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/operators/FlatMapDriver.java
@@ -81,7 +81,8 @@ public class FlatMapDriver<IT, OT> implements Driver<FlatMapFunction<IT, OT>, OT
 
 		if (LOG.isDebugEnabled()) {
 			LOG.debug("FlatMapDriver object reuse: " + (this.objectReuseEnabled ? "ENABLED" : "DISABLED") + ".");
-		}	}
+		}
+	}
 
 	@Override
 	public void run() throws Exception {

http://git-wip-us.apache.org/repos/asf/flink/blob/5b4e3d88/flink-yarn/src/main/java/org/apache/flink/yarn/cli/FlinkYarnSessionCli.java
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/java/org/apache/flink/yarn/cli/FlinkYarnSessionCli.java b/flink-yarn/src/main/java/org/apache/flink/yarn/cli/FlinkYarnSessionCli.java
index 5606719..863b442 100644
--- a/flink-yarn/src/main/java/org/apache/flink/yarn/cli/FlinkYarnSessionCli.java
+++ b/flink-yarn/src/main/java/org/apache/flink/yarn/cli/FlinkYarnSessionCli.java
@@ -208,7 +208,7 @@ public class FlinkYarnSessionCli implements CustomCommandLine<YarnClusterClient>
 		String applicationID = yarnProperties.getProperty(YARN_APPLICATION_ID_KEY);
 		if (applicationID == null) {
 			throw new IllegalConfigurationException("Yarn properties file found but doesn't contain a " +
-				"Yarn applicaiton id. Please delete the file at " + propertiesFile.getAbsolutePath());
+				"Yarn application id. Please delete the file at " + propertiesFile.getAbsolutePath());
 		}
 
 		try {

http://git-wip-us.apache.org/repos/asf/flink/blob/5b4e3d88/tools/deploy_to_maven.sh
----------------------------------------------------------------------
diff --git a/tools/deploy_to_maven.sh b/tools/deploy_to_maven.sh
index 676d8d0..c818a42 100755
--- a/tools/deploy_to_maven.sh
+++ b/tools/deploy_to_maven.sh
@@ -66,7 +66,7 @@ function deploy_to_s3() {
 pwd
 
 
-echo "install lifecylce mapping fake plugin"
+echo "install lifecycle mapping fake plugin"
 git clone https://github.com/mfriedenhagen/dummy-lifecycle-mapping-plugin.git
 cd dummy-lifecycle-mapping-plugin
 mvn -B install