Posted to commits@flink.apache.org by se...@apache.org on 2018/01/05 18:38:43 UTC
[02/19] flink git commit: [hotfix] Fix many many typos
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-filesystems/flink-s3-fs-hadoop/src/main/java/org/apache/hadoop/conf/Configuration.java
----------------------------------------------------------------------
diff --git a/flink-filesystems/flink-s3-fs-hadoop/src/main/java/org/apache/hadoop/conf/Configuration.java b/flink-filesystems/flink-s3-fs-hadoop/src/main/java/org/apache/hadoop/conf/Configuration.java
index 16f162d..86822de 100644
--- a/flink-filesystems/flink-s3-fs-hadoop/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/flink-filesystems/flink-s3-fs-hadoop/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -19,6 +19,43 @@
package org.apache.hadoop.conf;
import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Charsets;
+import com.google.common.base.Preconditions;
+import org.apache.commons.collections.map.UnmodifiableMap;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.alias.CredentialProvider;
+import org.apache.hadoop.security.alias.CredentialProvider.CredentialEntry;
+import org.apache.hadoop.security.alias.CredentialProviderFactory;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringInterner;
+import org.apache.hadoop.util.StringUtils;
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonGenerator;
+import org.w3c.dom.DOMException;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+import org.w3c.dom.Text;
+import org.xml.sax.SAXException;
+
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.transform.Transformer;
+import javax.xml.transform.TransformerException;
+import javax.xml.transform.TransformerFactory;
+import javax.xml.transform.dom.DOMSource;
+import javax.xml.transform.stream.StreamResult;
import java.io.BufferedInputStream;
import java.io.DataInput;
@@ -56,51 +93,12 @@ import java.util.StringTokenizer;
import java.util.WeakHashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-import java.util.regex.PatternSyntaxException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
-
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-import javax.xml.transform.Transformer;
-import javax.xml.transform.TransformerException;
-import javax.xml.transform.TransformerFactory;
-import javax.xml.transform.dom.DOMSource;
-import javax.xml.transform.stream.StreamResult;
-
-import com.google.common.base.Charsets;
-import org.apache.commons.collections.map.UnmodifiableMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableUtils;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.alias.CredentialProvider;
-import org.apache.hadoop.security.alias.CredentialProvider.CredentialEntry;
-import org.apache.hadoop.security.alias.CredentialProviderFactory;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.util.StringInterner;
-import org.apache.hadoop.util.StringUtils;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonGenerator;
-import org.w3c.dom.DOMException;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-import org.w3c.dom.Text;
-import org.xml.sax.SAXException;
-
-import com.google.common.base.Preconditions;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.regex.PatternSyntaxException;
/**
* Provides access to configuration parameters.
@@ -1937,7 +1935,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
* Get the value for a known password configuration element.
* In order to enable the elimination of clear text passwords in config,
* this method attempts to resolve the property name as an alias through
- * the CredentialProvider API and conditionally fallsback to config.
+ * the CredentialProvider API and conditionally falls back to config.
* @param name property name
* @return password
*/
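For reference, the fallback behavior described in this Javadoc can be exercised with a short, self-contained sketch (assuming Hadoop 2.6+ on the classpath; the alias db.password and the provider path below are illustrative, not taken from the patch):

    import org.apache.hadoop.conf.Configuration;

    public class PasswordLookupExample {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Point the config at a credential provider; if the alias cannot be
            // resolved there, getPassword() conditionally falls back to the
            // clear-text config value.
            conf.set("hadoop.security.credential.provider.path",
                    "jceks://file/tmp/creds.jceks");
            conf.set("db.password", "clear-text-fallback");
            char[] password = conf.getPassword("db.password");
            System.out.println(password == null ? "not set" : new String(password));
        }
    }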
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-java/src/test/java/org/apache/flink/api/java/io/CsvInputFormatTest.java
----------------------------------------------------------------------
diff --git a/flink-java/src/test/java/org/apache/flink/api/java/io/CsvInputFormatTest.java b/flink-java/src/test/java/org/apache/flink/api/java/io/CsvInputFormatTest.java
index 8939c5a..99c569c 100644
--- a/flink-java/src/test/java/org/apache/flink/api/java/io/CsvInputFormatTest.java
+++ b/flink-java/src/test/java/org/apache/flink/api/java/io/CsvInputFormatTest.java
@@ -808,7 +808,7 @@ public class CsvInputFormatTest {
}
- // Test disabled becase we do not support double-quote escaped quotes right now.
+ // Test disabled because we do not support double-quote escaped quotes right now.
// @Test
public void testParserCorrectness() throws Exception {
// RFC 4180 Compliance Test content
@@ -875,13 +875,13 @@ public class CsvInputFormatTest {
@Test
public void testWindowsLineEndRemoval() {
- //Check typical use case -- linux file is correct and it is set up to linuc(\n)
+ //Check typical use case -- linux file is correct and it is set up to linux (\n)
this.testRemovingTrailingCR("\n", "\n");
//Check typical windows case -- windows file endings and file has windows file endings set up
this.testRemovingTrailingCR("\r\n", "\r\n");
- //Check problematic case windows file -- windows file endings(\r\n) but linux line endings (\n) set up
+ //Check problematic case windows file -- windows file endings (\r\n) but linux line endings (\n) set up
this.testRemovingTrailingCR("\r\n", "\n");
//Check problematic case linux file -- linux file endings (\n) but windows file endings set up (\r\n)
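The case being tested here, a file with Windows ("\r\n") line endings read with a Linux ("\n") delimiter, reduces to stripping a trailing carriage return after splitting on "\n". A minimal sketch of that idea (an illustrative helper, not the actual CsvInputFormat code):

    public class TrailingCRExample {
        // Strip the '\r' left at the end of a line when a "\r\n" file is
        // split on "\n" alone.
        static String removeTrailingCR(String line) {
            return line.endsWith("\r") ? line.substring(0, line.length() - 1) : line;
        }

        public static void main(String[] args) {
            System.out.println(removeTrailingCR("hello\r"));  // prints "hello"
        }
    }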
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-java/src/test/java/org/apache/flink/api/java/operator/MaxByOperatorTest.java
----------------------------------------------------------------------
diff --git a/flink-java/src/test/java/org/apache/flink/api/java/operator/MaxByOperatorTest.java b/flink-java/src/test/java/org/apache/flink/api/java/operator/MaxByOperatorTest.java
index b207e19..5d00aa2 100644
--- a/flink-java/src/test/java/org/apache/flink/api/java/operator/MaxByOperatorTest.java
+++ b/flink-java/src/test/java/org/apache/flink/api/java/operator/MaxByOperatorTest.java
@@ -69,7 +69,7 @@ public class MaxByOperatorTest {
private final List<CustomType> customTypeData = new ArrayList<CustomType>();
/**
- * This test validates that an InvalidProgrammException is thrown when maxBy
+ * This test validates that an InvalidProgramException is thrown when maxBy
* is used on a custom data type.
*/
@Test(expected = InvalidProgramException.class)
@@ -86,7 +86,7 @@ public class MaxByOperatorTest {
/**
* This test validates that an index which is out of bounds throws an
- * IndexOutOfBOundsExcpetion.
+ * IndexOutOfBoundsException.
*/
@Test(expected = IndexOutOfBoundsException.class)
public void testOutOfTupleBoundsDataset1() {
@@ -100,7 +100,7 @@ public class MaxByOperatorTest {
/**
* This test validates that an index which is out of bounds throws an
- * IndexOutOfBOundsExcpetion.
+ * IndexOutOfBoundsException.
*/
@Test(expected = IndexOutOfBoundsException.class)
public void testOutOfTupleBoundsDataset2() {
@@ -114,7 +114,7 @@ public class MaxByOperatorTest {
/**
* This test validates that an index which is out of bounds throws an
- * IndexOutOfBOundsExcpetion.
+ * IndexOutOfBoundsException.
*/
@Test(expected = IndexOutOfBoundsException.class)
public void testOutOfTupleBoundsDataset3() {
@@ -147,7 +147,7 @@ public class MaxByOperatorTest {
}
/**
- * This test validates that an InvalidProgrammException is thrown when maxBy
+ * This test validates that an InvalidProgramException is thrown when maxBy
* is used on a custom data type.
*/
@Test(expected = InvalidProgramException.class)
@@ -164,7 +164,7 @@ public class MaxByOperatorTest {
/**
* This test validates that an index which is out of bounds throws an
- * IndexOutOfBOundsExcpetion.
+ * IndexOutOfBoundsException.
*/
@Test(expected = IndexOutOfBoundsException.class)
public void testOutOfTupleBoundsGrouping1() {
@@ -178,7 +178,7 @@ public class MaxByOperatorTest {
/**
* This test validates that an index which is out of bounds throws an
- * IndexOutOfBOundsExcpetion.
+ * IndexOutOfBoundsException.
*/
@Test(expected = IndexOutOfBoundsException.class)
public void testOutOfTupleBoundsGrouping2() {
@@ -192,7 +192,7 @@ public class MaxByOperatorTest {
/**
* This test validates that an index which is out of bounds throws an
- * IndexOutOfBOundsExcpetion.
+ * IndexOutOfBoundsException.
*/
@Test(expected = IndexOutOfBoundsException.class)
public void testOutOfTupleBoundsGrouping3() {
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-java/src/test/java/org/apache/flink/api/java/operator/MinByOperatorTest.java
----------------------------------------------------------------------
diff --git a/flink-java/src/test/java/org/apache/flink/api/java/operator/MinByOperatorTest.java b/flink-java/src/test/java/org/apache/flink/api/java/operator/MinByOperatorTest.java
index 02b84fa..4a77f91 100644
--- a/flink-java/src/test/java/org/apache/flink/api/java/operator/MinByOperatorTest.java
+++ b/flink-java/src/test/java/org/apache/flink/api/java/operator/MinByOperatorTest.java
@@ -69,7 +69,7 @@ public class MinByOperatorTest {
private final List<CustomType> customTypeData = new ArrayList<CustomType>();
/**
- * This test validates that an InvalidProgrammException is thrown when minBy
+ * This test validates that an InvalidProgramException is thrown when minBy
* is used on a custom data type.
*/
@Test(expected = InvalidProgramException.class)
@@ -86,7 +86,7 @@ public class MinByOperatorTest {
/**
* This test validates that an index which is out of bounds throws an
- * IndexOutOfBOundsExcpetion.
+ * IndexOutOfBoundsException.
*/
@Test(expected = IndexOutOfBoundsException.class)
public void testOutOfTupleBoundsDataset1() {
@@ -100,7 +100,7 @@ public class MinByOperatorTest {
/**
* This test validates that an index which is out of bounds throws an
- * IndexOutOfBOundsExcpetion.
+ * IndexOutOfBoundsException.
*/
@Test(expected = IndexOutOfBoundsException.class)
public void testOutOfTupleBoundsDataset2() {
@@ -114,7 +114,7 @@ public class MinByOperatorTest {
/**
* This test validates that an index which is out of bounds throws an
- * IndexOutOfBOundsExcpetion.
+ * IndexOutOfBoundsException.
*/
@Test(expected = IndexOutOfBoundsException.class)
public void testOutOfTupleBoundsDataset3() {
@@ -147,7 +147,7 @@ public class MinByOperatorTest {
}
/**
- * This test validates that an InvalidProgrammException is thrown when minBy
+ * This test validates that an InvalidProgramException is thrown when minBy
* is used on a custom data type.
*/
@Test(expected = InvalidProgramException.class)
@@ -164,7 +164,7 @@ public class MinByOperatorTest {
/**
* This test validates that an index which is out of bounds throws an
- * IndexOutOfBOundsExcpetion.
+ * IndexOutOfBoundsException.
*/
@Test(expected = IndexOutOfBoundsException.class)
public void testOutOfTupleBoundsGrouping1() {
@@ -178,7 +178,7 @@ public class MinByOperatorTest {
/**
* This test validates that an index which is out of bounds throws an
- * IndexOutOfBOundsExcpetion.
+ * IndexOutOfBoundsException.
*/
@Test(expected = IndexOutOfBoundsException.class)
public void testOutOfTupleBoundsGrouping2() {
@@ -192,7 +192,7 @@ public class MinByOperatorTest {
/**
* This test validates that an index which is out of bounds throws an
- * IndexOutOfBOundsExcpetion.
+ * IndexOutOfBoundsException.
*/
@Test(expected = IndexOutOfBoundsException.class)
public void testOutOfTupleBoundsGrouping3() {
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-java8/src/main/java/org/apache/flink/examples/java8/wordcount/WordCount.java
----------------------------------------------------------------------
diff --git a/flink-java8/src/main/java/org/apache/flink/examples/java8/wordcount/WordCount.java b/flink-java8/src/main/java/org/apache/flink/examples/java8/wordcount/WordCount.java
index 0130dec..8f36f66 100644
--- a/flink-java8/src/main/java/org/apache/flink/examples/java8/wordcount/WordCount.java
+++ b/flink-java8/src/main/java/org/apache/flink/examples/java8/wordcount/WordCount.java
@@ -62,7 +62,7 @@ public class WordCount {
DataSet<Tuple2<String, Integer>> counts =
// normalize and split each line
text.map(line -> line.toLowerCase().split("\\W+"))
- // convert splitted line in pairs (2-tuples) containing: (word,1)
+ // convert split line in pairs (2-tuples) containing: (word,1)
.flatMap((String[] tokens, Collector<Tuple2<String, Integer>> out) -> {
// emit the pairs with non-zero-length words
Arrays.stream(tokens)
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-java8/src/main/java/org/apache/flink/streaming/examples/java8/wordcount/WordCount.java
----------------------------------------------------------------------
diff --git a/flink-java8/src/main/java/org/apache/flink/streaming/examples/java8/wordcount/WordCount.java b/flink-java8/src/main/java/org/apache/flink/streaming/examples/java8/wordcount/WordCount.java
index f991433..b9dba77 100644
--- a/flink-java8/src/main/java/org/apache/flink/streaming/examples/java8/wordcount/WordCount.java
+++ b/flink-java8/src/main/java/org/apache/flink/streaming/examples/java8/wordcount/WordCount.java
@@ -62,7 +62,7 @@ public class WordCount {
DataStream<Tuple2<String, Integer>> counts =
// normalize and split each line
text.map(line -> line.toLowerCase().split("\\W+"))
- // convert splitted line in pairs (2-tuples) containing: (word,1)
+ // convert split line in pairs (2-tuples) containing: (word,1)
.flatMap((String[] tokens, Collector<Tuple2<String, Integer>> out) -> {
// emit the pairs with non-zero-length words
Arrays.stream(tokens)
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-java8/src/test/java/org/apache/flink/test/api/java/operators/lambdas/FilterITCase.java
----------------------------------------------------------------------
diff --git a/flink-java8/src/test/java/org/apache/flink/test/api/java/operators/lambdas/FilterITCase.java b/flink-java8/src/test/java/org/apache/flink/test/api/java/operators/lambdas/FilterITCase.java
index 345b119..6ad1058 100644
--- a/flink-java8/src/test/java/org/apache/flink/test/api/java/operators/lambdas/FilterITCase.java
+++ b/flink-java8/src/test/java/org/apache/flink/test/api/java/operators/lambdas/FilterITCase.java
@@ -28,7 +28,7 @@ import java.util.Collections;
import java.util.List;
/**
- * IT cases for lambda filter funtions.
+ * IT cases for lambda filter functions.
*/
public class FilterITCase extends JavaProgramTestBase {
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-cep/src/test/java/org/apache/flink/cep/nfa/NFATest.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-cep/src/test/java/org/apache/flink/cep/nfa/NFATest.java b/flink-libraries/flink-cep/src/test/java/org/apache/flink/cep/nfa/NFATest.java
index 2a12d37..7721653 100644
--- a/flink-libraries/flink-cep/src/test/java/org/apache/flink/cep/nfa/NFATest.java
+++ b/flink-libraries/flink-cep/src/test/java/org/apache/flink/cep/nfa/NFATest.java
@@ -133,7 +133,7 @@ public class NFATest extends TestLogger {
/**
* Tests that elements whose timestamp difference is exactly the window length are not matched.
- * The reaon is that the right window side (later elements) is exclusive.
+ * The reason is that the right window side (later elements) is exclusive.
*/
@Test
public void testWindowBorders() {
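The exclusive right window side mentioned above amounts to a strict comparison; a minimal sketch of the check (illustrative, not the actual NFA implementation):

    public class WindowBorderExample {
        // With an exclusive right border, two elements match only if their
        // timestamp difference is strictly smaller than the window length.
        static boolean withinWindow(long first, long later, long windowLength) {
            return later - first < windowLength;
        }

        public static void main(String[] args) {
            System.out.println(withinWindow(0, 10, 10));  // false: border is exclusive
            System.out.println(withinWindow(0, 9, 10));   // true
        }
    }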
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/RandomGenerableFactory.java
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/RandomGenerableFactory.java b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/RandomGenerableFactory.java
index ead29fc..c1a8999 100644
--- a/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/RandomGenerableFactory.java
+++ b/flink-libraries/flink-gelly/src/main/java/org/apache/flink/graph/generator/random/RandomGenerableFactory.java
@@ -46,7 +46,7 @@ import java.util.List;
public interface RandomGenerableFactory<T extends RandomGenerator> {
/**
- * The amount of work ({@code elementCount * cyclerPerElement}) is used to
+ * The amount of work ({@code elementCount * cyclesPerElement}) is used to
* generate a list of blocks of work of near-equal size.
*
* @param elementCount number of elements, as indexed in the {@code BlockInfo}
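Generating blocks of near-equal size from a total amount of work can be sketched as follows (illustrative arithmetic only, not Gelly's BlockInfo logic):

    public class NearEqualBlocks {
        // Divide totalWork units into blockCount chunks whose sizes differ by
        // at most one unit.
        static long[] split(long totalWork, int blockCount) {
            long[] blocks = new long[blockCount];
            long base = totalWork / blockCount;
            long remainder = totalWork % blockCount;
            for (int i = 0; i < blockCount; i++) {
                blocks[i] = base + (i < remainder ? 1 : 0);
            }
            return blocks;
        }

        public static void main(String[] args) {
            // e.g. elementCount * cyclesPerElement = 10, split across 3 blocks
            System.out.println(java.util.Arrays.toString(split(10, 3)));  // [4, 3, 3]
        }
    }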
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/classification/SVM.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/classification/SVM.scala b/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/classification/SVM.scala
index 721dd69..6d78ef7 100644
--- a/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/classification/SVM.scala
+++ b/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/classification/SVM.scala
@@ -35,7 +35,7 @@ import breeze.linalg.{DenseVector => BreezeDenseVector, Vector => BreezeVector}
/** Implements a soft-margin SVM using the communication-efficient distributed dual coordinate
* ascent algorithm (CoCoA) with hinge-loss function.
*
- * It can be used for binary classification problems, with the labels set as +1.0 to indiciate a
+ * It can be used for binary classification problems, with the labels set as +1.0 to indicate a
* positive example and -1.0 to indicate a negative example.
*
* The algorithm solves the following minimization problem:
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/outlier/StochasticOutlierSelection.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/outlier/StochasticOutlierSelection.scala b/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/outlier/StochasticOutlierSelection.scala
index 2c04bb0..ee82c03 100644
--- a/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/outlier/StochasticOutlierSelection.scala
+++ b/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/outlier/StochasticOutlierSelection.scala
@@ -154,7 +154,7 @@ object StochasticOutlierSelection extends WithParameters {
new TransformDataSetOperation[StochasticOutlierSelection, LabeledVector, (Int, Double)] {
- /** Overrides the method of the parent class and applies the sochastic outlier selection
+ /** Overrides the method of the parent class and applies the stochastic outlier selection
* algorithm.
*
* @param instance Instance of the class
@@ -181,7 +181,7 @@ object StochasticOutlierSelection extends WithParameters {
}
/** [[TransformDataSetOperation]] applies the stochastic outlier selection algorithm on a
- * [[Vector]] which will transform the high-dimensionaly input to a single Double output.
+ * [[Vector]] which will transform the high-dimensional input to a single Double output.
*
* @tparam T Type of the input and output data which has to be a subtype of [[Vector]]
* @return [[TransformDataSetOperation]] a single double which represents the outlierness of
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/pipeline/Estimator.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/pipeline/Estimator.scala b/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/pipeline/Estimator.scala
index dbe0782..ca7cb33 100644
--- a/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/pipeline/Estimator.scala
+++ b/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/pipeline/Estimator.scala
@@ -117,7 +117,7 @@ object Estimator{
}
/** Fallback [[TransformDataSetOperation]] for [[Transformer]] which do not support the input or
- * output type with which they are called. This is usualy the case if pipeline operators are
+ * output type with which they are called. This is usually the case if pipeline operators are
* chained which have incompatible input/output types. In order to detect these failures, the
* fallback [[TransformDataSetOperation]] throws a [[RuntimeException]] with the corresponding
* input/output types. Consequently, a wrong pipeline will be detected at pre-flight phase of
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/pipeline/Predictor.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/pipeline/Predictor.scala b/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/pipeline/Predictor.scala
index 9d11cff..d0f3064 100644
--- a/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/pipeline/Predictor.scala
+++ b/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/pipeline/Predictor.scala
@@ -230,7 +230,7 @@ trait PredictOperation[Instance, Model, Testing, Prediction] extends Serializabl
/** Calculates the prediction for a single element given the model of the [[Predictor]].
*
* @param value The unlabeled example on which we make the prediction
- * @param model The model representation of the prediciton algorithm
+ * @param model The model representation of the prediction algorithm
* @return A label for the provided example of type [[Prediction]]
*/
def predict(value: Testing, model: Model):
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/pipeline/Transformer.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/pipeline/Transformer.scala b/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/pipeline/Transformer.scala
index 014ad2b..4b44127 100644
--- a/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/pipeline/Transformer.scala
+++ b/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/pipeline/Transformer.scala
@@ -48,7 +48,7 @@ trait Transformer[Self <: Transformer[Self]]
with Serializable {
that: Self =>
- /** Transform operation which transforms an input [[DataSet]] of type I into an ouptut [[DataSet]]
+ /** Transform operation which transforms an input [[DataSet]] of type I into an output [[DataSet]]
* of type O. The actual transform operation is implemented within the
* [[TransformDataSetOperation]].
*
@@ -57,7 +57,7 @@ trait Transformer[Self <: Transformer[Self]]
* @param transformOperation [[TransformDataSetOperation]] which encapsulates the algorithm's
* logic
* @tparam Input Input data type
- * @tparam Output Ouptut data type
+ * @tparam Output Output data type
* @return
*/
def transform[Input, Output](
@@ -125,7 +125,7 @@ object Transformer{
* @tparam Instance Type of the [[Transformer]] for which the [[TransformDataSetOperation]] is
* defined
* @tparam Input Input data type
- * @tparam Output Ouptut data type
+ * @tparam Output Output data type
*/
trait TransformDataSetOperation[Instance, Input, Output] extends Serializable{
def transformDataSet(
@@ -148,10 +148,10 @@ trait TransformOperation[Instance, Model, Input, Output] extends Serializable{
/** Retrieves the model of the [[Transformer]] for which this operation has been defined.
*
* @param instance
- * @param transformParemters
+ * @param transformParameters
* @return
*/
- def getModel(instance: Instance, transformParemters: ParameterMap): DataSet[Model]
+ def getModel(instance: Instance, transformParameters: ParameterMap): DataSet[Model]
/** Transforms a single element with respect to the model associated with the respective
* [[Transformer]]
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/preprocessing/Splitter.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/preprocessing/Splitter.scala b/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/preprocessing/Splitter.scala
index 46b1462..3451c80 100644
--- a/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/preprocessing/Splitter.scala
+++ b/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/preprocessing/Splitter.scala
@@ -140,7 +140,7 @@ object Splitter {
* @param kFolds The number of TrainTest DataSets to be returned. Each 'testing' will be
* 1/k of the dataset, randomly sampled, the training will be the remainder
* of the dataset. The DataSet is split into kFolds first, so that no
- * observation will occurin in multiple folds.
+ * observation will occur in multiple folds.
* @param seed Random number generator seed.
* @return An array of TrainTestDataSets
*/
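The guarantee that no observation occurs in multiple folds follows from assigning every index to exactly one fold before forming the train/test pairs. A rough sketch of such an assignment (a hypothetical helper, not FlinkML's DataSet-based implementation):

    import java.util.Random;

    public class KFoldAssignment {
        // Assign each of n observations to exactly one of k folds; fold i is
        // used as the test set once, the other k-1 folds as training data.
        static int[] assignFolds(int n, int k, long seed) {
            int[] fold = new int[n];
            for (int i = 0; i < n; i++) {
                fold[i] = i % k;  // balanced assignment
            }
            // Shuffle so the split is random rather than positional.
            Random rnd = new Random(seed);
            for (int i = n - 1; i > 0; i--) {
                int j = rnd.nextInt(i + 1);
                int tmp = fold[i]; fold[i] = fold[j]; fold[j] = tmp;
            }
            return fold;
        }
    }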
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/recommendation/ALS.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/recommendation/ALS.scala b/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/recommendation/ALS.scala
index 0454381..2e2e35a 100644
--- a/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/recommendation/ALS.scala
+++ b/flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/recommendation/ALS.scala
@@ -40,7 +40,7 @@ import scala.util.Random
/** Alternating least squares algorithm to calculate a matrix factorization.
*
- * Given a matrix `R`, ALS calculates two matricess `U` and `V` such that `R ~~ U^TV`. The
+ * Given a matrix `R`, ALS calculates two matrices `U` and `V` such that `R ~~ U^TV`. The
* unknown row dimension is given by the number of latent factors. Since matrix factorization
* is often used in the context of recommendation, we'll call the first matrix the user and the
* second matrix the item matrix. The `i`th column of the user matrix is `u_i` and the `i`th
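Concretely, once U and V are learned, the predicted entry of R for user i and item j is the dot product of the factor columns u_i and v_j. A minimal sketch under the notation of this doc (the factor vectors are illustrative):

    public class AlsPrediction {
        // r_ij ~ u_i . v_j : reconstruct a single entry of R from the factors.
        static double predict(double[] userFactors, double[] itemFactors) {
            double sum = 0.0;
            for (int f = 0; f < userFactors.length; f++) {
                sum += userFactors[f] * itemFactors[f];
            }
            return sum;
        }
    }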
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/main/resources/tableSourceConverter.properties
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/main/resources/tableSourceConverter.properties b/flink-libraries/flink-table/src/main/resources/tableSourceConverter.properties
index d548f48..86a48a8 100644
--- a/flink-libraries/flink-table/src/main/resources/tableSourceConverter.properties
+++ b/flink-libraries/flink-table/src/main/resources/tableSourceConverter.properties
@@ -18,9 +18,9 @@
################################################################################
# The config file is used to specify the packages of current module where
-# to find TableSourceConverter implementation class annotationed with TableType.
+# to find TableSourceConverter implementation class annotated with TableType.
# If there are multiple packages to scan, put those packages together into a
-# string seperated with ',', for example, org.package1,org.package2.
+# string separated with ',', for example, org.package1,org.package2.
# Please notice:
# It's better to have a tableSourceConverter.properties in each connector Module
# which offers converters instead of putting all information into the
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/TableEnvironment.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/TableEnvironment.scala b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/TableEnvironment.scala
index 6170fa1..6e00b56 100644
--- a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/TableEnvironment.scala
+++ b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/TableEnvironment.scala
@@ -301,7 +301,7 @@ abstract class TableEnvironment(val config: TableConfig) {
throw new ExternalCatalogAlreadyExistException(name)
}
this.externalCatalogs.put(name, externalCatalog)
- // create an external catalog calicte schema, register it on the root schema
+ // create an external catalog Calcite schema, register it on the root schema
ExternalCatalogSchema.registerCatalog(rootSchema, name, externalCatalog)
}
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/TableSchema.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/TableSchema.scala b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/TableSchema.scala
index f4d928f..534ef39 100644
--- a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/TableSchema.scala
+++ b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/TableSchema.scala
@@ -40,9 +40,9 @@ class TableSchema(
// check uniqueness of field names
if (columnNames.toSet.size != columnTypes.length) {
val duplicateFields = columnNames
- // count occurences of field names
+ // count occurrences of field names
.groupBy(identity).mapValues(_.length)
- // filter for occurences > 1 and map to field name
+ // filter for occurrences > 1 and map to field name
.filter(g => g._2 > 1).keys
throw new TableException(
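The occurrence-counting idiom in the Scala snippet above has a direct Java analogue (a sketch using java.util.stream, not the TableSchema code itself):

    import java.util.Arrays;
    import java.util.Map;
    import java.util.Set;
    import java.util.stream.Collectors;

    public class DuplicateFields {
        // Count occurrences of field names, then keep those occurring > 1.
        static Set<String> duplicates(String[] columnNames) {
            return Arrays.stream(columnNames)
                .collect(Collectors.groupingBy(n -> n, Collectors.counting()))
                .entrySet().stream()
                .filter(e -> e.getValue() > 1)
                .map(Map.Entry::getKey)
                .collect(Collectors.toSet());
        }
    }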
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/calcite/FlinkTypeSystem.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/calcite/FlinkTypeSystem.scala b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/calcite/FlinkTypeSystem.scala
index 3a195ed..99d8cab 100644
--- a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/calcite/FlinkTypeSystem.scala
+++ b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/calcite/FlinkTypeSystem.scala
@@ -26,11 +26,11 @@ import org.apache.calcite.sql.`type`.SqlTypeName
*/
class FlinkTypeSystem extends RelDataTypeSystemImpl {
- // we cannot use Int.MaxValue because of an overflow in Calcites type inference logic
+ // we cannot use Int.MaxValue because of an overflow in Calcite's type inference logic
// half should be enough for all use cases
override def getMaxNumericScale: Int = Int.MaxValue / 2
- // we cannot use Int.MaxValue because of an overflow in Calcites type inference logic
+ // we cannot use Int.MaxValue because of an overflow in Calcite's type inference logic
// half should be enough for all use cases
override def getMaxNumericPrecision: Int = Int.MaxValue / 2
@@ -40,7 +40,7 @@ class FlinkTypeSystem extends RelDataTypeSystemImpl {
case SqlTypeName.VARCHAR =>
Int.MaxValue
- // we currenty support only timestamps with milliseconds precision
+ // we currently support only timestamps with milliseconds precision
case SqlTypeName.TIMESTAMP =>
3
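The overflow this comment guards against is plain int arithmetic: if the type system reported Integer.MAX_VALUE, any inference rule that adds two precisions would wrap around (illustrative arithmetic, not Calcite code):

    public class PrecisionOverflow {
        public static void main(String[] args) {
            int max = Integer.MAX_VALUE;
            // Adding two precisions, as decimal inference rules commonly do,
            // silently wraps to a negative value:
            System.out.println(max + max);            // -2
            // Capping at Int.MaxValue / 2 keeps such sums within int range:
            System.out.println(max / 2 + max / 2);    // 2147483646
        }
    }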
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/codegen/CodeGenerator.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/codegen/CodeGenerator.scala b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/codegen/CodeGenerator.scala
index df7ef57..3a43544 100644
--- a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/codegen/CodeGenerator.scala
+++ b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/codegen/CodeGenerator.scala
@@ -244,7 +244,7 @@ abstract class CodeGenerator(
* @param returnType conversion target type. Inputs and output must have the same arity.
* @param resultFieldNames result field names necessary for a mapping to POJO fields.
* @param rowtimeExpression an expression to extract the value of a rowtime field from
- * the input data. Required if the field indicies include a rowtime
+ * the input data. Required if the field indices include a rowtime
* marker.
* @return instance of GeneratedExpression
*/
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/ScalarFunction.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/ScalarFunction.scala b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/ScalarFunction.scala
index e41b876..4c01c1c 100644
--- a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/ScalarFunction.scala
+++ b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/ScalarFunction.scala
@@ -61,7 +61,7 @@ abstract class ScalarFunction extends UserDefinedFunction {
/**
* Returns the result type of the evaluation method with a given signature.
*
- * This method needs to be overriden in case Flink's type extraction facilities are not
+ * This method needs to be overridden in case Flink's type extraction facilities are not
* sufficient to extract the [[TypeInformation]] based on the return type of the evaluation
* method. Flink's type extraction facilities can handle basic types or
* simple POJOs but might be wrong for more complex, custom, or composite types.
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/TableFunction.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/TableFunction.scala b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/TableFunction.scala
index ff69954..d80ec47 100644
--- a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/TableFunction.scala
+++ b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/TableFunction.scala
@@ -111,7 +111,7 @@ abstract class TableFunction[T] extends UserDefinedFunction {
/**
* Returns the result type of the evaluation method with a given signature.
*
- * This method needs to be overriden in case Flink's type extraction facilities are not
+ * This method needs to be overridden in case Flink's type extraction facilities are not
* sufficient to extract the [[TypeInformation]] based on the return type of the evaluation
* method. Flink's type extraction facilities can handle basic types or
* simple POJOs but might be wrong for more complex, custom, or composite types.
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/utils/AggSqlFunction.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/utils/AggSqlFunction.scala b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/utils/AggSqlFunction.scala
index 4b1e921..241e511 100644
--- a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/utils/AggSqlFunction.scala
+++ b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/utils/AggSqlFunction.scala
@@ -58,7 +58,7 @@ class AggSqlFunction(
createReturnTypeInference(returnType, typeFactory),
createOperandTypeInference(aggregateFunction, typeFactory),
createOperandTypeChecker(aggregateFunction),
- // Do not need to provide a calcite aggregateFunction here. Flink aggregateion function
+ // Do not need to provide a calcite aggregateFunction here. Flink aggregation function
// will be generated when translating the calcite relnode to flink runtime execution plan
null,
false,
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/utils/UserDefinedFunctionUtils.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/utils/UserDefinedFunctionUtils.scala b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/utils/UserDefinedFunctionUtils.scala
index 4a34732..c2eabae 100644
--- a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/utils/UserDefinedFunctionUtils.scala
+++ b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/functions/utils/UserDefinedFunctionUtils.scala
@@ -632,7 +632,7 @@ object UserDefinedFunctionUtils {
/**
* Creates a [[LogicalTableFunctionCall]] by parsing a String expression.
*
- * @param tableEnv The table environmenent to lookup the function.
+ * @param tableEnv The table environment to lookup the function.
* @param udtf a String expression of a TableFunctionCall, such as "split(c)"
* @return A LogicalTableFunctionCall.
*/
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/nodes/CommonCorrelate.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/nodes/CommonCorrelate.scala b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/nodes/CommonCorrelate.scala
index c53f090..4331457 100644
--- a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/nodes/CommonCorrelate.scala
+++ b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/nodes/CommonCorrelate.scala
@@ -143,7 +143,7 @@ trait CommonCorrelate {
|""".stripMargin
} else {
- // adjust indicies of InputRefs to adhere to schema expected by generator
+ // adjust indices of InputRefs to adhere to schema expected by generator
val changeInputRefIndexShuttle = new RexShuttle {
override def visitInputRef(inputRef: RexInputRef): RexNode = {
new RexInputRef(inputSchema.arity + inputRef.getIndex, inputRef.getType)
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/nodes/dataset/DataSetJoin.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/nodes/dataset/DataSetJoin.scala b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/nodes/dataset/DataSetJoin.scala
index e461c57..56bf8ea 100644
--- a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/nodes/dataset/DataSetJoin.scala
+++ b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/nodes/dataset/DataSetJoin.scala
@@ -443,11 +443,11 @@ class DataSetJoin(
s"join: (${joinSelectionToString(joinRowType)})"
}
- /** Returns an array of indicies with some indicies being a prefix. */
+ /** Returns an array of indices with some indices being a prefix. */
private def getFullIndiciesWithPrefix(keys: Array[Int], numFields: Int): Array[Int] = {
- // get indicies of all fields which are not keys
+ // get indices of all fields which are not keys
val nonKeys = (0 until numFields).filter(!keys.contains(_))
- // return all field indicies prefixed by keys
+ // return all field indices prefixed by keys
keys ++ nonKeys
}
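A worked example of the method above: with keys = [2, 0] and numFields = 4, the non-key fields are [1, 3], so the result is [2, 0, 1, 3]: the key indices as a prefix, followed by the remaining field indices in order.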
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/nodes/datastream/retractionTraits.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/nodes/datastream/retractionTraits.scala b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/nodes/datastream/retractionTraits.scala
index 173b7d3..b1c9222 100644
--- a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/nodes/datastream/retractionTraits.scala
+++ b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/nodes/datastream/retractionTraits.scala
@@ -98,13 +98,14 @@ object AccMode extends Enumeration {
* Changes are encoded as follows:
* - insert: (true, NewRow)
* - update: (true, NewRow) // the Row includes the full unique key to identify the row to update
- * - delete: (false, OldRow) // the Row includes the full unique key to idenify the row to delete
+ * - delete: (false, OldRow) // the Row includes the full unique key to identify the row to
+ * delete
*
*/
val Acc = Value
/**
- * * An operator in [[AccRetract]] mode emits change messages as
+ * An operator in [[AccRetract]] mode emits change messages as
* [[org.apache.flink.table.runtime.types.CRow]] which encode a pair of (Boolean, Row).
*
* Changes are encoded as follows:
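Under this encoding, a stream that inserts a row, updates it, and finally deletes it carries messages like the following (illustrative tuples in the (Boolean, Row) shape described above):

    (true,  Row(key=1, value="a"))   // insert
    (true,  Row(key=1, value="b"))   // update: new row carries the full unique key
    (false, Row(key=1, value="b"))   // delete: old row carries the full unique key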
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/schema/InlineTable.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/schema/InlineTable.scala b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/schema/InlineTable.scala
index 7e61fdf..84f1d11 100644
--- a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/schema/InlineTable.scala
+++ b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/schema/InlineTable.scala
@@ -46,9 +46,9 @@ abstract class InlineTable[T](
// check uniqueness of field names
if (fieldNames.length != fieldNames.toSet.size) {
val duplicateFields = fieldNames
- // count occurences of field names
+ // count occurrences of field names
.groupBy(identity).mapValues(_.length)
- // filter for occurences > 1 and map to field name
+ // filter for occurrences > 1 and map to field name
.filter(g => g._2 > 1).keys
throw new TableException(
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/IncrementalAggregateTimeWindowFunction.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/IncrementalAggregateTimeWindowFunction.scala b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/IncrementalAggregateTimeWindowFunction.scala
index a908f49..3156615 100644
--- a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/IncrementalAggregateTimeWindowFunction.scala
+++ b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/IncrementalAggregateTimeWindowFunction.scala
@@ -27,7 +27,7 @@ import org.apache.flink.table.runtime.types.CRow
import org.apache.flink.util.Collector
/**
- * Computes the final aggregate value from incrementally computed aggreagtes.
+ * Computes the final aggregate value from incrementally computed aggregates.
*
* @param numGroupingKey the number of grouping keys
* @param numAggregates the number of aggregates
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/IncrementalAggregateWindowFunction.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/IncrementalAggregateWindowFunction.scala b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/IncrementalAggregateWindowFunction.scala
index c9fa0c9..4ec6407 100644
--- a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/IncrementalAggregateWindowFunction.scala
+++ b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/IncrementalAggregateWindowFunction.scala
@@ -27,7 +27,7 @@ import org.apache.flink.types.Row
import org.apache.flink.util.Collector
/**
- * Computes the final aggregate value from incrementally computed aggreagtes.
+ * Computes the final aggregate value from incrementally computed aggregates.
*
* @param numGroupingKey The number of grouping keys.
* @param numAggregates The number of aggregates.
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/RowTimeSortProcessFunction.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/RowTimeSortProcessFunction.scala b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/RowTimeSortProcessFunction.scala
index 0d69355..f40feb1 100644
--- a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/RowTimeSortProcessFunction.scala
+++ b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/RowTimeSortProcessFunction.scala
@@ -30,7 +30,7 @@ import org.apache.flink.types.Row
import org.apache.flink.util.{Collector, Preconditions}
/**
- * ProcessFunction to sort on event-time and possibly addtional secondary sort attributes.
+ * ProcessFunction to sort on event-time and possibly additional secondary sort attributes.
*
* @param inputRowType The data type of the input data.
* @param rowtimeIdx The index of the rowtime field.
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/RowTimeUnboundedOver.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/RowTimeUnboundedOver.scala b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/RowTimeUnboundedOver.scala
index 27d307b..181c768 100644
--- a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/RowTimeUnboundedOver.scala
+++ b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/aggregate/RowTimeUnboundedOver.scala
@@ -192,7 +192,7 @@ abstract class RowTimeUnboundedOver(
val curTimestamp = sortedTimestamps.removeFirst()
val curRowList = rowMapState.get(curTimestamp)
- // process the same timestamp datas, the mechanism is different according ROWS or RANGE
+ // process the same timestamp data; the mechanism is different according to ROWS or RANGE
processElementsWithSameTimestamp(curRowList, lastAccumulator, out)
rowMapState.remove(curTimestamp)
@@ -234,7 +234,7 @@ abstract class RowTimeUnboundedOver(
}
/**
+ * Process the same timestamp data; the mechanism is different between
+ * Process the same timestamp data, the mechanism is different between
* rows and range window.
*/
def processElementsWithSameTimestamp(
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/join/WindowJoinUtil.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/join/WindowJoinUtil.scala b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/join/WindowJoinUtil.scala
index 7006476..18e26df 100644
--- a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/join/WindowJoinUtil.scala
+++ b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/join/WindowJoinUtil.scala
@@ -420,7 +420,7 @@ object WindowJoinUtil {
* Generates a JoinFunction that applies additional join predicates and projects the result.
*
* @param config table env config
- * @param joinType join type to determain whether input can be null
+ * @param joinType join type to determine whether input can be null
* @param leftType left stream type
* @param rightType right stream type
* @param returnType return type
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/sources/CsvTableSource.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/sources/CsvTableSource.scala b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/sources/CsvTableSource.scala
index 659a6cd..ba076b4 100644
--- a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/sources/CsvTableSource.scala
+++ b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/sources/CsvTableSource.scala
@@ -153,7 +153,6 @@ class CsvTableSource private (
override def projectFields(fields: Array[Int]): CsvTableSource = {
val selectedFields = if (fields.isEmpty) Array(0) else fields
-// val selectedFiels = fields
new CsvTableSource(
path,
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/sources/TableSourceUtil.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/sources/TableSourceUtil.scala b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/sources/TableSourceUtil.scala
index 6895419..5cb7e90 100644
--- a/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/sources/TableSourceUtil.scala
+++ b/flink-libraries/flink-table/src/main/scala/org/apache/flink/table/sources/TableSourceUtil.scala
@@ -243,7 +243,7 @@ object TableSourceUtil {
* Returns the Calcite schema of a [[TableSource]].
*
* @param tableSource The [[TableSource]] for which the Calcite schema is generated.
- * @param selectedFields The indicies of all selected fields. None, if all fields are selected.
+ * @param selectedFields The indices of all selected fields. None, if all fields are selected.
* @param streaming Flag to determine whether the schema of a stream or batch table is created.
* @param typeFactory The type factory to create the schema.
* @return The Calcite schema for the selected fields of the given [[TableSource]].
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-libraries/flink-table/src/test/scala/org/apache/flink/table/expressions/RowTypeTest.scala
----------------------------------------------------------------------
diff --git a/flink-libraries/flink-table/src/test/scala/org/apache/flink/table/expressions/RowTypeTest.scala b/flink-libraries/flink-table/src/test/scala/org/apache/flink/table/expressions/RowTypeTest.scala
index abe3ae2..df84a84 100644
--- a/flink-libraries/flink-table/src/test/scala/org/apache/flink/table/expressions/RowTypeTest.scala
+++ b/flink-libraries/flink-table/src/test/scala/org/apache/flink/table/expressions/RowTypeTest.scala
@@ -49,7 +49,7 @@ class RowTypeTest extends RowTypeTestBase {
"Map('foo', 'bar'), row(1, true))",
"ROW(DATE '1985-04-11', CAST(0.1 AS DECIMAL), ARRAY[1, 2, 3], " +
"MAP['foo', 'bar'], row(1, true))",
- "1985-04-11,0.1,[1, 2, 3],{foo=bar},1,true") // string faltten
+ "1985-04-11,0.1,[1, 2, 3],{foo=bar},1,true") // string flatten
testAllApis(
row(1 + 1, 2 * 3, Null(Types.STRING)),
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-mesos/src/main/java/org/apache/flink/mesos/entrypoint/MesosEntrypointUtils.java
----------------------------------------------------------------------
diff --git a/flink-mesos/src/main/java/org/apache/flink/mesos/entrypoint/MesosEntrypointUtils.java b/flink-mesos/src/main/java/org/apache/flink/mesos/entrypoint/MesosEntrypointUtils.java
index 368d62d..af72c96 100755
--- a/flink-mesos/src/main/java/org/apache/flink/mesos/entrypoint/MesosEntrypointUtils.java
+++ b/flink-mesos/src/main/java/org/apache/flink/mesos/entrypoint/MesosEntrypointUtils.java
@@ -42,7 +42,7 @@ import scala.concurrent.duration.Duration;
import scala.concurrent.duration.FiniteDuration;
/**
- * Utils for Mesos entrpoints.
+ * Utils for Mesos entry points.
*/
public class MesosEntrypointUtils {
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-mesos/src/main/java/org/apache/flink/mesos/runtime/clusterframework/services/AbstractMesosServices.java
----------------------------------------------------------------------
diff --git a/flink-mesos/src/main/java/org/apache/flink/mesos/runtime/clusterframework/services/AbstractMesosServices.java b/flink-mesos/src/main/java/org/apache/flink/mesos/runtime/clusterframework/services/AbstractMesosServices.java
index e4f4cf7..63f371d 100644
--- a/flink-mesos/src/main/java/org/apache/flink/mesos/runtime/clusterframework/services/AbstractMesosServices.java
+++ b/flink-mesos/src/main/java/org/apache/flink/mesos/runtime/clusterframework/services/AbstractMesosServices.java
@@ -27,7 +27,7 @@ import akka.actor.ActorSystem;
import static org.apache.flink.util.Preconditions.checkNotNull;
/**
- * An abrstact implementation of {@link MesosServices}.
+ * An abstract implementation of {@link MesosServices}.
*/
public abstract class AbstractMesosServices implements MesosServices {
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-metrics/flink-metrics-jmx/src/main/java/org/apache/flink/metrics/jmx/JMXReporter.java
----------------------------------------------------------------------
diff --git a/flink-metrics/flink-metrics-jmx/src/main/java/org/apache/flink/metrics/jmx/JMXReporter.java b/flink-metrics/flink-metrics-jmx/src/main/java/org/apache/flink/metrics/jmx/JMXReporter.java
index 1cc7d38..461f1dc 100644
--- a/flink-metrics/flink-metrics-jmx/src/main/java/org/apache/flink/metrics/jmx/JMXReporter.java
+++ b/flink-metrics/flink-metrics-jmx/src/main/java/org/apache/flink/metrics/jmx/JMXReporter.java
@@ -85,7 +85,7 @@ public class JMXReporter implements MetricReporter {
/** The names under which the registered metrics have been added to the MBeanServer. */
private final Map<Metric, ObjectName> registeredMetrics;
- /** The server to which JMX clients connect to. ALlows for better control over port usage. */
+ /** The server to which JMX clients connect. Allows for better control over port usage. */
private JMXServer jmxServer;
public JMXReporter() {
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-optimizer/src/main/java/org/apache/flink/optimizer/traversals/PlanFinalizer.java
----------------------------------------------------------------------
diff --git a/flink-optimizer/src/main/java/org/apache/flink/optimizer/traversals/PlanFinalizer.java b/flink-optimizer/src/main/java/org/apache/flink/optimizer/traversals/PlanFinalizer.java
index b3b7cf9..0136e20 100644
--- a/flink-optimizer/src/main/java/org/apache/flink/optimizer/traversals/PlanFinalizer.java
+++ b/flink-optimizer/src/main/java/org/apache/flink/optimizer/traversals/PlanFinalizer.java
@@ -209,7 +209,7 @@ public class PlanFinalizer implements Visitor<PlanNode> {
}
}
- // pass the visitor to the iteraton's step function
+ // pass the visitor to the iteration's step function
if (visitable instanceof IterationPlanNode) {
// push the iteration node onto the stack
final IterationPlanNode iterNode = (IterationPlanNode) visitable;
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-optimizer/src/test/java/org/apache/flink/optimizer/UnionPropertyPropagationTest.java
----------------------------------------------------------------------
diff --git a/flink-optimizer/src/test/java/org/apache/flink/optimizer/UnionPropertyPropagationTest.java b/flink-optimizer/src/test/java/org/apache/flink/optimizer/UnionPropertyPropagationTest.java
index fefc627..7248c10 100644
--- a/flink-optimizer/src/test/java/org/apache/flink/optimizer/UnionPropertyPropagationTest.java
+++ b/flink-optimizer/src/test/java/org/apache/flink/optimizer/UnionPropertyPropagationTest.java
@@ -136,7 +136,7 @@ public class UnionPropertyPropagationTest extends CompilerTestBase {
}
/* Test on the union input connections
- * Must be NUM_INPUTS input connections, all FlatMapOperators with a own partitioning strategy(propably hash)
+ * Must be NUM_INPUTS input connections, all FlatMapOperators with their own partitioning strategy (probably hash)
*/
if (visitable instanceof NAryUnionPlanNode) {
int numberInputs = 0;
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime-web/src/test/java/org/apache/flink/runtime/webmonitor/RedirectHandlerTest.java
----------------------------------------------------------------------
diff --git a/flink-runtime-web/src/test/java/org/apache/flink/runtime/webmonitor/RedirectHandlerTest.java b/flink-runtime-web/src/test/java/org/apache/flink/runtime/webmonitor/RedirectHandlerTest.java
index 3a976e4..98dc20a 100644
--- a/flink-runtime-web/src/test/java/org/apache/flink/runtime/webmonitor/RedirectHandlerTest.java
+++ b/flink-runtime-web/src/test/java/org/apache/flink/runtime/webmonitor/RedirectHandlerTest.java
@@ -59,7 +59,7 @@ public class RedirectHandlerTest extends TestLogger {
* Tests the behaviour of the RedirectHandler under the following conditions.
*
* <p>1. No local address known --> service unavailable
- * 2. Local address knwon but no gateway resolved --> service unavailable
+ * 2. Local address known but no gateway resolved --> service unavailable
* 3. Remote leader gateway --> redirection
* 4. Local leader gateway
* @throws Exception
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime-web/src/test/java/org/apache/flink/runtime/webmonitor/history/HistoryServerStaticFileServerHandlerTest.java
----------------------------------------------------------------------
diff --git a/flink-runtime-web/src/test/java/org/apache/flink/runtime/webmonitor/history/HistoryServerStaticFileServerHandlerTest.java b/flink-runtime-web/src/test/java/org/apache/flink/runtime/webmonitor/history/HistoryServerStaticFileServerHandlerTest.java
index 066de74..23f0f53 100644
--- a/flink-runtime-web/src/test/java/org/apache/flink/runtime/webmonitor/history/HistoryServerStaticFileServerHandlerTest.java
+++ b/flink-runtime-web/src/test/java/org/apache/flink/runtime/webmonitor/history/HistoryServerStaticFileServerHandlerTest.java
@@ -55,7 +55,7 @@ public class HistoryServerStaticFileServerHandlerTest {
int port = webUI.getServerPort();
try {
- // verify that 404 message is returned when requesting a non-existant file
+ // verify that 404 message is returned when requesting a non-existent file
String notFound404 = HistoryServerTest.getFromHTTP("http://localhost:" + port + "/hello");
Assert.assertTrue(notFound404.contains("404 Not Found"));
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime-web/web-dashboard/vendor-local/d3-timeline.js
----------------------------------------------------------------------
diff --git a/flink-runtime-web/web-dashboard/vendor-local/d3-timeline.js b/flink-runtime-web/web-dashboard/vendor-local/d3-timeline.js
index aadca55..be02f32 100644
--- a/flink-runtime-web/web-dashboard/vendor-local/d3-timeline.js
+++ b/flink-runtime-web/web-dashboard/vendor-local/d3-timeline.js
@@ -69,7 +69,7 @@
.attr("clip-path", "url(#" + prefix + "-gclip" + ")")
// check if the user wants relative time
- // if so, substract the first timestamp from each subsequent timestamps
+ // if so, subtract the first timestamp from each subsequent timestamp
if(timeIsRelative){
g.each(function (d, i) {
d.forEach(function (datum, index) {
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/akka/FlinkUntypedActor.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/akka/FlinkUntypedActor.java b/flink-runtime/src/main/java/org/apache/flink/runtime/akka/FlinkUntypedActor.java
index 8078c26..e0279b3 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/akka/FlinkUntypedActor.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/akka/FlinkUntypedActor.java
@@ -49,7 +49,7 @@ public abstract class FlinkUntypedActor extends UntypedActor {
* processing time of the incoming message if the logging level is set to debug. After logging
* the handleLeaderSessionID method is called.
*
- * <p>Important: This method cannot be overriden. The actor specific message handling logic is
+ * <p>Important: This method cannot be overridden. The actor specific message handling logic is
* implemented by the method handleMessage.
*
* @param message Incoming message
@@ -124,7 +124,7 @@ public abstract class FlinkUntypedActor extends UntypedActor {
protected abstract void handleMessage(Object message) throws Exception;
/**
- * Returns the current leader session ID associcated with this actor.
+ * Returns the current leader session ID associated with this actor.
* @return
*/
protected abstract UUID getLeaderSessionID();
@@ -134,10 +134,10 @@ public abstract class FlinkUntypedActor extends UntypedActor {
* a leader session ID (indicated by {@link RequiresLeaderSessionID}) in a
* {@link LeaderSessionMessage} with the actor's leader session ID.
*
- * <p>This method can be overriden to implement a different decoration behavior.
+ * <p>This method can be overridden to implement a different decoration behavior.
*
* @param message Message to be decorated
- * @return The deocrated message
+ * @return The decorated message
*/
protected Object decorateMessage(Object message) {
if (message instanceof RequiresLeaderSessionID) {
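The decoration contract this Javadoc describes can be sketched with simplified, self-contained stand-in types (hypothetical classes, not the actual Flink runtime classes):

    import java.util.UUID;

    interface RequiresLeaderSessionID { }

    final class LeaderSessionMessage {
        final UUID leaderSessionID;
        final Object message;
        LeaderSessionMessage(UUID leaderSessionID, Object message) {
            this.leaderSessionID = leaderSessionID;
            this.message = message;
        }
    }

    abstract class DecoratingActorSketch {
        protected abstract UUID getLeaderSessionID();

        // Default behavior: only messages that require a leader session ID
        // are wrapped; everything else passes through untouched.
        protected Object decorateMessage(Object message) {
            if (message instanceof RequiresLeaderSessionID) {
                return new LeaderSessionMessage(getLeaderSessionID(), message);
            }
            return message;
        }
    }

Subclasses that need a different decoration behavior override decorateMessage, as the Javadoc notes.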
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/blob/PermanentBlobCache.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/blob/PermanentBlobCache.java b/flink-runtime/src/main/java/org/apache/flink/runtime/blob/PermanentBlobCache.java
index ae59f59..83cb18e 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/blob/PermanentBlobCache.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/blob/PermanentBlobCache.java
@@ -261,7 +261,7 @@ public class PermanentBlobCache extends AbstractBlobCache implements PermanentBl
/*
* NOTE: normally it is not required to acquire the write lock to delete the job's
- * storage directory since there should be noone accessing it with the ref
+ * storage directory since there should be no one accessing it with the ref
* counter being 0 - acquire it just in case, to always be on the safe side
*/
readWriteLock.writeLock().lock();
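Condensed into a self-contained sketch, the defensive locking pattern this comment describes looks as follows (deleteAction stands in for the actual directory removal):

    import java.util.concurrent.locks.ReentrantReadWriteLock;

    class BlobCacheLockSketch {
        private final ReentrantReadWriteLock readWriteLock = new ReentrantReadWriteLock();

        // Even though a ref count of 0 implies no active readers, take the
        // write lock before deleting, to always be on the safe side.
        void deleteStorageDirectory(Runnable deleteAction) {
            readWriteLock.writeLock().lock();
            try {
                deleteAction.run();
            } finally {
                readWriteLock.writeLock().unlock();
            }
        }
    }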
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/CheckpointCoordinator.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/CheckpointCoordinator.java b/flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/CheckpointCoordinator.java
index 9a4456e..824563f 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/CheckpointCoordinator.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/CheckpointCoordinator.java
@@ -946,7 +946,7 @@ public class CheckpointCoordinator {
* Fails all pending checkpoints which have not been acknowledged by the given execution
* attempt id.
*
- * @param executionAttemptId for which to discard unaknowledged pending checkpoints
+ * @param executionAttemptId for which to discard unacknowledged pending checkpoints
* @param cause of the failure
*/
public void failUnacknowledgedPendingCheckpointsFor(ExecutionAttemptID executionAttemptId, Throwable cause) {
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/PendingCheckpointStats.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/PendingCheckpointStats.java b/flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/PendingCheckpointStats.java
index 0f32250..59b404a 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/PendingCheckpointStats.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/PendingCheckpointStats.java
@@ -56,7 +56,7 @@ public class PendingCheckpointStats extends AbstractCheckpointStats {
/** Current buffered bytes during alignment over all collected subtasks. */
private volatile long currentAlignmentBuffered;
- /** Stats of the latest acknowleged subtask. */
+ /** Stats of the latest acknowledged subtask. */
private volatile SubtaskStateStats latestAcknowledgedSubtask;
/**
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/client/JobListeningContext.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/client/JobListeningContext.java b/flink-runtime/src/main/java/org/apache/flink/runtime/client/JobListeningContext.java
index eb045c0..5ce0de8 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/client/JobListeningContext.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/client/JobListeningContext.java
@@ -114,7 +114,7 @@ public final class JobListeningContext {
}
/**
- * @return The Job Client actor which communicats with the JobManager.
+ * @return The Job Client actor which communicates with the JobManager.
*/
public ActorRef getJobClientActor() {
return jobClientActor;
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/BootstrapTools.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/BootstrapTools.java b/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/BootstrapTools.java
index d1efd77..ecfbc60 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/BootstrapTools.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/BootstrapTools.java
@@ -353,7 +353,7 @@ public class BootstrapTools {
/**
* Generates the shell command to start a task manager.
* @param flinkConfig The Flink configuration.
- * @param tmParams Paramaters for the task manager.
+ * @param tmParams Parameters for the task manager.
* @param configDirectory The configuration directory for the flink-conf.yaml
* @param logDirectory The log directory.
* @param hasLogback Uses logback?
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/FlinkResourceManager.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/FlinkResourceManager.java b/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/FlinkResourceManager.java
index f9c39c1..e20dd9b 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/FlinkResourceManager.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/FlinkResourceManager.java
@@ -738,7 +738,7 @@ public abstract class FlinkResourceManager<WorkerType extends ResourceIDRetrieva
* Starts the resource manager actors.
* @param configuration The configuration for the resource manager
* @param actorSystem The actor system to start the resource manager in
- * @param leaderRetriever The leader retriever service to intialize the resource manager
+ * @param leaderRetriever The leader retriever service to initialize the resource manager
* @param resourceManagerClass The class of the ResourceManager to be started
* @return ActorRef of the resource manager
*/
@@ -757,7 +757,7 @@ public abstract class FlinkResourceManager<WorkerType extends ResourceIDRetrieva
* Starts the resource manager actors.
* @param configuration The configuration for the resource manager
* @param actorSystem The actor system to start the resource manager in
- * @param leaderRetriever The leader retriever service to intialize the resource manager
+ * @param leaderRetriever The leader retriever service to initialize the resource manager
* @param resourceManagerClass The class of the ResourceManager to be started
* @param resourceManagerActorName The name of the resource manager actor.
* @return ActorRef of the resource manager
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/overlays/HadoopConfOverlay.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/overlays/HadoopConfOverlay.java b/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/overlays/HadoopConfOverlay.java
index bd79218..c45cd02 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/overlays/HadoopConfOverlay.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/overlays/HadoopConfOverlay.java
@@ -40,7 +40,7 @@ import java.io.IOException;
* The following environment variables are set in the container:
* - HADOOP_CONF_DIR
*
- * The folloowing Flink configuration entries are updated:
+ * The following Flink configuration entries are updated:
* - fs.hdfs.hadoopconf
*/
public class HadoopConfOverlay implements ContainerOverlay {
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/overlays/KeytabOverlay.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/overlays/KeytabOverlay.java b/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/overlays/KeytabOverlay.java
index 271b32d..c8c87d4 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/overlays/KeytabOverlay.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/overlays/KeytabOverlay.java
@@ -33,7 +33,7 @@ import java.io.IOException;
/**
* Overlays cluster-level Kerberos credentials (i.e. keytab) into a container.
*
- * The folloowing Flink configuration entries are updated:
+ * The following Flink configuration entries are updated:
* - security.kerberos.login.keytab
*/
public class KeytabOverlay extends AbstractContainerOverlay {
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/types/ResourceProfile.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/types/ResourceProfile.java b/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/types/ResourceProfile.java
index 3dec3f3..6eb9af4 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/types/ResourceProfile.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/clusterframework/types/ResourceProfile.java
@@ -76,7 +76,7 @@ public class ResourceProfile implements Serializable, Comparable<ResourceProfile
* @param heapMemoryInMB The size of the heap memory, in megabytes.
* @param directMemoryInMB The size of the direct memory, in megabytes.
* @param nativeMemoryInMB The size of the native memory, in megabytes.
- * @param extendedResources The extendiable resources such as GPU and FPGA
+ * @param extendedResources The extended resources such as GPU and FPGA
*/
public ResourceProfile(
double cpuCores,
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/AccessExecutionGraph.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/AccessExecutionGraph.java b/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/AccessExecutionGraph.java
index ebc0768..c38f818 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/AccessExecutionGraph.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/AccessExecutionGraph.java
@@ -50,7 +50,7 @@ public interface AccessExecutionGraph {
JobID getJobID();
/**
- * Returns the job name for thie execution graph.
+ * Returns the job name for the execution graph.
*
* @return job name for this execution graph
*/
@@ -90,7 +90,7 @@ public interface AccessExecutionGraph {
/**
* Returns an iterable containing all job vertices for this execution graph in the order they were created.
*
- * @return iterable containing all job vertices for this execution graph in the order they were creater
+ * @return iterable containing all job vertices for this execution graph in the order they were created
*/
Iterable<? extends AccessExecutionJobVertex> getVerticesTopologically();
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/Execution.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/Execution.java b/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/Execution.java
index cc35060..367d02c 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/Execution.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/Execution.java
@@ -782,7 +782,7 @@ public class Execution implements AccessExecution, Archiveable<ArchivedExecution
* @param sampleId of the stack trace sample
* @param numSamples the sample should contain
* @param delayBetweenSamples to wait
- * @param maxStrackTraceDepth of the samples
+ * @param maxStackTraceDepth of the samples
* @param timeout until the request times out
* @return Future stack trace sample response
*/
@@ -790,7 +790,7 @@ public class Execution implements AccessExecution, Archiveable<ArchivedExecution
int sampleId,
int numSamples,
Time delayBetweenSamples,
- int maxStrackTraceDepth,
+ int maxStackTraceDepth,
Time timeout) {
final LogicalSlot slot = assignedResource;
@@ -803,7 +803,7 @@ public class Execution implements AccessExecution, Archiveable<ArchivedExecution
sampleId,
numSamples,
delayBetweenSamples,
- maxStrackTraceDepth,
+ maxStackTraceDepth,
timeout);
} else {
return FutureUtils.completedExceptionally(new Exception("The execution has no slot assigned."));
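To illustrate what the sampling parameters mean, here is a rough, hypothetical sampler built only on java.lang APIs (not Flink's actual stack trace sampling machinery):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    class StackTraceSamplerSketch {
        // Takes numSamples stack traces of the given thread, one every
        // delayBetweenSamplesMillis, each truncated to maxStackTraceDepth frames.
        static List<StackTraceElement[]> sample(
                Thread thread,
                int numSamples,
                long delayBetweenSamplesMillis,
                int maxStackTraceDepth) throws InterruptedException {
            List<StackTraceElement[]> samples = new ArrayList<>(numSamples);
            for (int i = 0; i < numSamples; i++) {
                StackTraceElement[] trace = thread.getStackTrace();
                samples.add(Arrays.copyOf(trace, Math.min(trace.length, maxStackTraceDepth)));
                if (i + 1 < numSamples) {
                    Thread.sleep(delayBetweenSamplesMillis);
                }
            }
            return samples;
        }
    }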
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/ExecutionVertex.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/ExecutionVertex.java b/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/ExecutionVertex.java
index cb4f2c8..ef46086 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/ExecutionVertex.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/ExecutionVertex.java
@@ -447,7 +447,7 @@ public class ExecutionVertex implements AccessExecutionVertex, Archiveable<Archi
* <li>Repeated executions of stateful tasks try to co-locate the execution with its state.
* </ul>
*
- * @return The preferred excution locations for the execution attempt.
+ * @return The preferred execution locations for the execution attempt.
*
* @see #getPreferredLocationsBasedOnState()
* @see #getPreferredLocationsBasedOnInputs()
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/highavailability/FsNegativeRunningJobsRegistry.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/highavailability/FsNegativeRunningJobsRegistry.java b/flink-runtime/src/main/java/org/apache/flink/runtime/highavailability/FsNegativeRunningJobsRegistry.java
index cb79a65..d29dcec 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/highavailability/FsNegativeRunningJobsRegistry.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/highavailability/FsNegativeRunningJobsRegistry.java
@@ -31,7 +31,7 @@ import static org.apache.flink.util.Preconditions.checkNotNull;
/**
* This {@link RunningJobsRegistry} tracks the status jobs via marker files,
- * marking running jobs viarunning marker files, marking finished jobs via finished marker files.
+ * marking running jobs via running marker files, marking finished jobs via finished marker files.
*
* <p>The general contract is the following:
* <ul>
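The marker-file protocol can be illustrated with a simplified sketch (hypothetical file names and layout, not Flink's actual paths):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;

    class MarkerFileRegistrySketch {
        private final Path baseDir;

        MarkerFileRegistrySketch(Path baseDir) { this.baseDir = baseDir; }

        // A job counts as running once its running marker exists ...
        void setJobRunning(String jobId) throws IOException {
            touch(baseDir.resolve(jobId + ".running"));
        }

        // ... and as finished once its finished marker exists, which takes
        // precedence over the running marker.
        void setJobFinished(String jobId) throws IOException {
            touch(baseDir.resolve(jobId + ".finished"));
        }

        boolean isJobRunning(String jobId) {
            return Files.exists(baseDir.resolve(jobId + ".running"))
                    && !Files.exists(baseDir.resolve(jobId + ".finished"));
        }

        private void touch(Path marker) throws IOException {
            Files.createDirectories(marker.getParent());
            if (!Files.exists(marker)) {
                Files.createFile(marker);
            }
        }
    }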
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/iterative/task/AbstractIterativeTask.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/iterative/task/AbstractIterativeTask.java b/flink-runtime/src/main/java/org/apache/flink/runtime/iterative/task/AbstractIterativeTask.java
index bde358c..a36fc57 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/iterative/task/AbstractIterativeTask.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/iterative/task/AbstractIterativeTask.java
@@ -97,7 +97,7 @@ public abstract class AbstractIterativeTask<S extends Function, OT> extends Batc
// check if the driver is resettable
if (this.driver instanceof ResettableDriver) {
final ResettableDriver<?, ?> resDriver = (ResettableDriver<?, ?>) this.driver;
- // make sure that the according inputs are not reseted
+ // make sure that the corresponding inputs are not reset
for (int i = 0; i < resDriver.getNumberOfInputs(); i++) {
if (resDriver.isInputResettable(i)) {
excludeFromReset(i);
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/jobgraph/JobVertex.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/jobgraph/JobVertex.java b/flink-runtime/src/main/java/org/apache/flink/runtime/jobgraph/JobVertex.java
index 4f52895..1fe95eb 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/jobgraph/JobVertex.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/jobgraph/JobVertex.java
@@ -68,7 +68,7 @@ public class JobVertex implements java.io.Serializable {
/** Number of subtasks to split this task into at runtime.*/
private int parallelism = ExecutionConfig.PARALLELISM_DEFAULT;
- /** Maximum number of subtasks to split this taks into a runtime. */
+ /** Maximum number of subtasks to split this task into at runtime. */
private int maxParallelism = -1;
/** The minimum resource of the vertex */
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/SlotContext.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/SlotContext.java b/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/SlotContext.java
index 65bf2a1..3878167 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/SlotContext.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/SlotContext.java
@@ -32,7 +32,7 @@ public interface SlotContext {
* Gets the id under which the slot has been allocated on the TaskManager. This id uniquely identifies the
* physical slot.
*
- * @return The id under whic teh slot has been allocated on the TaskManager
+ * @return The id under which the slot has been allocated on the TaskManager
*/
AllocationID getAllocationId();
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/slotpool/SlotPoolGateway.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/slotpool/SlotPoolGateway.java b/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/slotpool/SlotPoolGateway.java
index d3b51f7..7a627b4 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/slotpool/SlotPoolGateway.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/slotpool/SlotPoolGateway.java
@@ -108,7 +108,7 @@ public interface SlotPoolGateway extends AllocatedSlotActions, RpcGateway {
* individually accepted or rejected by returning the collection of accepted
* slot offers.
*
- * @param taskManagerLocation from which the slot offeres originate
+ * @param taskManagerLocation from which the slot offers originate
* @param taskManagerGateway to talk to the slot offerer
* @param offers slot offers which are offered to the {@link SlotPool}
* @return A collection of accepted slot offers (future). The remaining slot offers are
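The accept/reject contract reduces to a small generic sketch (the offer type and the acceptance check are stand-ins, not Flink's actual types):

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.concurrent.CompletableFuture;
    import java.util.function.Predicate;

    class SlotOfferSketch {
        // Each offer is decided individually; only accepted offers are
        // returned, so the offerer can release the rest.
        static <S> CompletableFuture<Collection<S>> offerSlots(
                Collection<S> offers, Predicate<S> accepts) {
            Collection<S> accepted = new ArrayList<>();
            for (S offer : offers) {
                if (accepts.test(offer)) {
                    accepted.add(offer);
                }
            }
            return CompletableFuture.completedFuture(accepted);
        }
    }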
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/operators/BatchTask.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/operators/BatchTask.java b/flink-runtime/src/main/java/org/apache/flink/runtime/operators/BatchTask.java
index 87b0a76..be81877 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/operators/BatchTask.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/operators/BatchTask.java
@@ -131,7 +131,7 @@ public class BatchTask<S extends Function, OT> extends AbstractInvokable impleme
protected int[] iterativeInputs;
/**
- * The indices of the iterative broadcast inputs. Empty, if non of the inputs is iteratve.
+ * The indices of the iterative broadcast inputs. Empty, if none of the inputs is iterative.
*/
protected int[] iterativeBroadcastInputs;
@@ -184,13 +184,13 @@ public class BatchTask<S extends Function, OT> extends AbstractInvokable impleme
/**
* Certain inputs may be excluded from resetting. For example, the initial partial solution
- * in an iteration head must not be reseted (it is read through the back channel), when all
- * others are reseted.
+ * in an iteration head must not be reset (it is read through the back channel), when all
+ * others are reset.
*/
private boolean[] excludeFromReset;
/**
- * Flag indicating for each input whether it is cached and can be reseted.
+ * Flag indicating for each input whether it is cached and can be reset.
*/
private boolean[] inputIsCached;
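The two flag arrays work together roughly as in this hypothetical helper (a simplified stand-in, not the actual BatchTask bookkeeping):

    class ResetFlagsSketch {
        private final boolean[] excludeFromReset;
        private final boolean[] inputIsCached;

        ResetFlagsSketch(int numInputs) {
            this.excludeFromReset = new boolean[numInputs];
            this.inputIsCached = new boolean[numInputs];
        }

        // e.g. the initial partial solution in an iteration head is excluded,
        // because it is read through the back channel instead.
        void excludeFromReset(int inputNum) {
            excludeFromReset[inputNum] = true;
        }

        boolean shouldReset(int inputNum) {
            return inputIsCached[inputNum] && !excludeFromReset[inputNum];
        }
    }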
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/operators/CoGroupDriver.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/operators/CoGroupDriver.java b/flink-runtime/src/main/java/org/apache/flink/runtime/operators/CoGroupDriver.java
index c3bd492..7ed86bf 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/operators/CoGroupDriver.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/operators/CoGroupDriver.java
@@ -122,14 +122,14 @@ public class CoGroupDriver<IT1, IT2, OT> implements Driver<CoGroupFunction<IT1,
}
if (objectReuseEnabled) {
- // create CoGropuTaskIterator according to provided local strategy.
+ // create CoGroupTaskIterator according to provided local strategy.
this.coGroupIterator = new ReusingSortMergeCoGroupIterator<IT1, IT2>(
in1, in2,
serializer1, groupComparator1,
serializer2, groupComparator2,
pairComparatorFactory.createComparator12(groupComparator1, groupComparator2));
} else {
- // create CoGropuTaskIterator according to provided local strategy.
+ // create CoGroupTaskIterator according to provided local strategy.
this.coGroupIterator = new NonReusingSortMergeCoGroupIterator<IT1, IT2>(
in1, in2,
serializer1, groupComparator1,
http://git-wip-us.apache.org/repos/asf/flink/blob/3bc293ef/flink-runtime/src/main/java/org/apache/flink/runtime/operators/hash/InPlaceMutableHashTable.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/operators/hash/InPlaceMutableHashTable.java b/flink-runtime/src/main/java/org/apache/flink/runtime/operators/hash/InPlaceMutableHashTable.java
index f46fcfb..bfc9aec 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/operators/hash/InPlaceMutableHashTable.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/operators/hash/InPlaceMutableHashTable.java
@@ -628,7 +628,7 @@ public class InPlaceMutableHashTable<T> extends AbstractMutableHashTable<T> {
}
/**
- * Overwrites a record at the sepcified position. The record is read from a DataInputView (this will be the staging area).
+ * Overwrites a record at the specified position. The record is read from a DataInputView (this will be the staging area).
* WARNING: The record must not be larger than the original record.
* @param pointer Points to the position to overwrite.
* @param input The DataInputView to read the record from
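The size constraint in that warning can be made concrete with a plain-array sketch (the real table writes into memory segments via the DataInputView; this is only an illustration):

    class OverwriteSketch {
        // Copies newRecord over the record starting at `pointer`; the caller
        // must guarantee it is no larger than the record being overwritten.
        static void overwriteRecordAt(byte[] storage, int pointer,
                                      byte[] newRecord, int originalSize) {
            if (newRecord.length > originalSize) {
                throw new IllegalArgumentException(
                        "record must not be larger than the original record");
            }
            System.arraycopy(newRecord, 0, storage, pointer, newRecord.length);
        }
    }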