You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@flink.apache.org by fh...@apache.org on 2015/10/29 13:22:42 UTC
[09/13] flink git commit: [FLINK-2559] Clean up JavaDocs
[FLINK-2559] Clean up JavaDocs
- Remove broken HTML tags like <br/>, <p/>, ...
- close unclosed HTML tags
- replaces special chars by HTML escaping, e.g., '<' by &lt;
- wrap code examples by {@code}
- fix incorrect @see and @link references
- fix incorrect @throws
- fix typos
This closes #1298
Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/680b5a90
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/680b5a90
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/680b5a90
Branch: refs/heads/master
Commit: 680b5a90de2428b4b6b986f7b40409c2d1d181a7
Parents: ec7bf50
Author: Hubert Czerpak <hu...@gmail.com>
Authored: Thu Oct 22 22:51:25 2015 +0100
Committer: Fabian Hueske <fh...@apache.org>
Committed: Thu Oct 29 10:52:22 2015 +0100
----------------------------------------------------------------------
.../org/apache/flink/client/CliFrontend.java | 3 +-
.../storm/excamation/ExclamationLocal.java | 13 +-
.../storm/excamation/ExclamationTopology.java | 13 +-
.../storm/excamation/ExclamationWithBolt.java | 13 +-
.../storm/excamation/ExclamationWithSpout.java | 13 +-
.../flink/storm/split/SpoutSplitExample.java | 4 +-
.../storm/wordcount/BoltTokenizerWordCount.java | 9 +-
.../wordcount/BoltTokenizerWordCountPojo.java | 9 +-
.../BoltTokenizerWordCountWithNames.java | 9 +-
.../storm/wordcount/SpoutSourceWordCount.java | 11 +-
.../flink/storm/wordcount/WordCountLocal.java | 11 +-
.../storm/wordcount/WordCountLocalByName.java | 11 +-
.../wordcount/WordCountRemoteByClient.java | 11 +-
.../wordcount/WordCountRemoteBySubmitter.java | 11 +-
.../storm/wordcount/WordCountTopology.java | 9 +-
.../org/apache/flink/storm/api/FlinkClient.java | 2 +-
.../flink/storm/api/FlinkTopologyBuilder.java | 4 +-
.../flink/storm/util/SplitStreamType.java | 3 +-
.../flink/storm/wrappers/BoltWrapper.java | 4 +-
.../flink/storm/wrappers/SpoutWrapper.java | 10 +-
.../model/tweet/entities/Entities.java | 2 +-
.../model/tweet/entities/HashTags.java | 2 +-
.../model/tweet/entities/Media.java | 2 +-
.../model/tweet/entities/Symbol.java | 2 +-
.../model/tweet/entities/URL.java | 2 +-
.../model/tweet/entities/UserMention.java | 2 +-
.../flink/api/common/ExecutionConfig.java | 2 +-
.../apache/flink/api/common/ExecutionMode.java | 8 +-
.../api/common/accumulators/Histogram.java | 2 +-
.../api/common/aggregators/Aggregator.java | 2 +-
.../common/distributions/DataDistribution.java | 2 +-
.../api/common/functions/CoGroupFunction.java | 4 +-
.../api/common/functions/CrossFunction.java | 4 +-
.../api/common/functions/FilterFunction.java | 4 +-
.../api/common/functions/FlatJoinFunction.java | 4 +-
.../api/common/functions/FlatMapFunction.java | 4 +-
.../api/common/functions/FoldFunction.java | 4 +-
.../common/functions/GroupReduceFunction.java | 4 +-
.../api/common/functions/JoinFunction.java | 4 +-
.../flink/api/common/functions/MapFunction.java | 4 +-
.../common/functions/MapPartitionFunction.java | 4 +-
.../api/common/functions/ReduceFunction.java | 4 +-
.../functions/RichGroupReduceFunction.java | 2 +-
.../flink/api/common/io/FileOutputFormat.java | 2 +-
.../common/operators/AbstractUdfOperator.java | 2 +-
.../api/common/typeutils/TypeComparator.java | 8 +-
.../flink/configuration/ConfigConstants.java | 2 +-
.../org/apache/flink/core/fs/FileSystem.java | 18 ++-
.../apache/flink/core/memory/MemorySegment.java | 4 +-
.../java/org/apache/flink/types/Record.java | 2 +-
.../org/apache/flink/types/StringValue.java | 4 +-
.../java/org/apache/flink/util/Visitable.java | 4 +-
.../flink/examples/java/clustering/KMeans.java | 4 +-
.../clustering/util/KMeansDataGenerator.java | 2 +
.../flink/examples/java/distcp/DistCp.java | 2 +-
.../examples/java/graph/PageRankBasic.java | 2 +-
.../examples/java/ml/LinearRegression.java | 4 +-
.../examples/java/relational/TPCHQuery10.java | 4 +-
.../examples/java/relational/TPCHQuery3.java | 4 +-
.../java/relational/WebLogAnalysis.java | 8 +-
.../examples/java/wordcount/PojoExample.java | 2 +-
.../examples/java/wordcount/WordCount.java | 2 +-
.../examples/java/wordcount/WordCountMeta.java | 4 +-
.../java/org/apache/flink/api/java/DataSet.java | 124 +++++++++----------
.../java/org/apache/flink/api/java/Utils.java | 1 -
.../api/java/functions/FunctionAnnotation.java | 30 ++---
.../java/hadoop/mapred/HadoopInputFormat.java | 2 +-
.../java/hadoop/mapred/HadoopOutputFormat.java | 2 +-
.../flink/api/java/io/SplitDataProperties.java | 16 +--
.../api/java/operators/CoGroupOperator.java | 42 +++----
.../flink/api/java/operators/CrossOperator.java | 30 ++---
.../flink/api/java/operators/DataSink.java | 8 +-
.../flink/api/java/operators/Grouping.java | 2 +-
.../flink/api/java/operators/JoinOperator.java | 52 ++++----
.../java/operators/SingleInputUdfOperator.java | 6 +-
.../api/java/operators/SortedGrouping.java | 14 +--
.../api/java/operators/TwoInputUdfOperator.java | 12 +-
.../api/java/operators/UnsortedGrouping.java | 22 ++--
.../operators/join/JoinOperatorSetsBase.java | 26 ++--
.../translation/Tuple3WrappingCollector.java | 2 +-
.../translation/TupleWrappingCollector.java | 2 +-
.../record/functions/FunctionAnnotation.java | 6 +-
.../record/io/ExternalProcessInputFormat.java | 2 +-
.../flink/api/java/typeutils/AvroTypeInfo.java | 4 +-
.../typeutils/runtime/kryo/Serializers.java | 4 +-
.../flink/api/java/utils/DataSetUtils.java | 4 +-
.../examples/java8/relational/TPCHQuery10.java | 4 +-
.../main/java/org/apache/flink/graph/Graph.java | 30 ++---
.../graph/example/EuclideanGraphWeighing.java | 3 +-
.../flink/graph/example/GraphMetrics.java | 2 +-
.../flink/graph/example/IncrementalSSSP.java | 2 +-
.../flink/graph/example/MusicProfiles.java | 8 +-
.../apache/flink/graph/gsa/ApplyFunction.java | 6 +-
.../apache/flink/graph/gsa/GatherFunction.java | 6 +-
.../org/apache/flink/graph/gsa/Neighbor.java | 4 +-
.../org/apache/flink/graph/gsa/SumFunction.java | 6 +-
.../graph/library/ConnectedComponents.java | 2 +-
.../graph/library/GSAConnectedComponents.java | 2 +-
.../apache/flink/graph/library/GSAPageRank.java | 2 +-
.../apache/flink/graph/library/PageRank.java | 2 +-
.../flink/graph/spargel/MessagingFunction.java | 8 +-
.../graph/spargel/VertexCentricIteration.java | 2 +-
.../graph/spargel/VertexUpdateFunction.java | 16 ++-
.../flink/optimizer/dag/OptimizerNode.java | 2 +-
.../operators/OperatorDescriptorSingle.java | 2 +-
.../apache/flink/optimizer/plan/Channel.java | 1 -
.../optimizer/postpass/SparseKeySchema.java | 2 +-
.../apache/flink/runtime/blob/BlobService.java | 1 -
.../runtime/checkpoint/PendingCheckpoint.java | 1 -
.../flink/runtime/execution/ExecutionState.java | 4 +-
.../flink/runtime/filecache/FileCache.java | 2 +-
.../instance/SlotSharingGroupAssignment.java | 4 +-
.../io/disk/iomanager/BlockChannelReader.java | 2 +-
.../io/disk/iomanager/BlockChannelWriter.java | 2 +-
.../network/partition/consumer/InputGate.java | 8 +-
.../partition/consumer/SingleInputGate.java | 8 +-
.../task/IterationIntermediateTask.java | 2 +-
.../runtime/jobmanager/scheduler/Scheduler.java | 2 +-
.../sort/AbstractMergeInnerJoinIterator.java | 2 +-
.../runtime/operators/util/BloomFilter.java | 8 +-
.../runtime/webmonitor/WebMonitorUtils.java | 2 +-
.../runtime/yarn/AbstractFlinkYarnCluster.java | 4 +-
.../runtime/zookeeper/StateStorageHelper.java | 2 +-
.../zookeeper/ZooKeeperStateHandleStore.java | 8 +-
.../mapred/HadoopMapFunction.java | 2 +-
.../mapred/HadoopReduceCombineFunction.java | 2 +-
.../mapred/HadoopReduceFunction.java | 4 +-
.../org/apache/flink/api/java/JarHelper.java | 2 +-
.../flink/api/java/table/package-info.java | 8 +-
.../connectors/kafka/FlinkKafkaConsumer.java | 2 +-
.../kafka/partitioner/FixedPartitioner.java | 16 +--
.../examples/iteration/IterateExample.java | 7 +-
.../ml/IncrementalLearningSkeleton.java | 3 +-
.../socket/SocketTextStreamWordCount.java | 13 +-
.../examples/twitter/TwitterStream.java | 17 ++-
.../examples/wordcount/PojoExample.java | 4 +-
.../streaming/examples/wordcount/WordCount.java | 4 +-
.../streaming/api/datastream/DataStream.java | 2 +-
.../datastream/SingleOutputStreamOperator.java | 9 +-
.../environment/StreamExecutionEnvironment.java | 18 +--
.../functions/source/RichSourceFunction.java | 1 -
.../api/functions/source/SourceFunction.java | 12 +-
.../api/graph/StreamGraphGenerator.java | 2 +-
.../transformations/StreamTransformation.java | 14 +--
.../streaming/api/windowing/time/Time.java | 3 +-
.../streaming/runtime/tasks/StreamTask.java | 4 +-
.../tasks/OneInputStreamTaskTestHarness.java | 2 +-
.../runtime/tasks/StreamTaskTestHarness.java | 2 +-
.../tasks/TwoInputStreamTaskTestHarness.java | 2 +-
.../test/util/MultipleProgramsTestBase.java | 4 +-
.../test/recordJobs/wordcount/WordCount.java | 6 +
151 files changed, 513 insertions(+), 570 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-clients/src/main/java/org/apache/flink/client/CliFrontend.java
----------------------------------------------------------------------
diff --git a/flink-clients/src/main/java/org/apache/flink/client/CliFrontend.java b/flink-clients/src/main/java/org/apache/flink/client/CliFrontend.java
index c638894..5485030 100644
--- a/flink-clients/src/main/java/org/apache/flink/client/CliFrontend.java
+++ b/flink-clients/src/main/java/org/apache/flink/client/CliFrontend.java
@@ -696,7 +696,8 @@ public class CliFrontend {
* Creates a Packaged program from the given command line options.
*
* @return A PackagedProgram (upon success)
- * @throws java.io.FileNotFoundException, org.apache.flink.client.program.ProgramInvocationException, java.lang.Throwable
+ * @throws java.io.FileNotFoundException
+ * @throws org.apache.flink.client.program.ProgramInvocationException
*/
protected PackagedProgram buildProgram(ProgramOptions options)
throws FileNotFoundException, ProgramInvocationException
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/excamation/ExclamationLocal.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/excamation/ExclamationLocal.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/excamation/ExclamationLocal.java
index 985cd68..56a0125 100644
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/excamation/ExclamationLocal.java
+++ b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/excamation/ExclamationLocal.java
@@ -28,19 +28,16 @@ import org.apache.flink.storm.excamation.operators.ExclamationBolt;
* Implements the "Exclamation" program that attaches five exclamation mark to every line of a text files in a streaming
* fashion. The program is constructed as a regular {@link backtype.storm.generated.StormTopology} and submitted to
* Flink for execution in the same way as to a Storm {@link backtype.storm.LocalCluster}.
- * <p/>
+ * <p>
* This example shows how to run program directly within Java, thus it cannot be used to submit a
* {@link backtype.storm.generated.StormTopology} via Flink command line clients (ie, bin/flink).
- * <p/>
- * <p/>
+ * <p>
* The input is a plain text file with lines separated by newline characters.
- * <p/>
- * <p/>
- * Usage: <code>ExclamationLocal <text path> <result path></code><br/>
+ * <p>
+ * Usage: <code>ExclamationLocal <text path> <result path></code><br>
* If no parameters are provided, the program is run with default data from
* {@link org.apache.flink.examples.java.wordcount.util.WordCountData}.
- * <p/>
- * <p/>
+ * <p>
* This example shows how to:
* <ul>
* <li>run a regular Storm program locally on Flink</li>
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/excamation/ExclamationTopology.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/excamation/ExclamationTopology.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/excamation/ExclamationTopology.java
index 70d25a2..9d94f5c 100644
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/excamation/ExclamationTopology.java
+++ b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/excamation/ExclamationTopology.java
@@ -29,17 +29,14 @@ import org.apache.flink.storm.util.BoltPrintSink;
/**
* Implements the "Exclamation" program that attaches two exclamation marks to every line of a text files in a streaming
- * fashion. The program is constructed as a regular {@link StormTopology}.
- * <p/>
- * <p/>
+ * fashion. The program is constructed as a regular {@link backtype.storm.generated.StormTopology}.
+ * <p>
* The input is a plain text file with lines separated by newline characters.
- * <p/>
- * <p/>
+ * <p>
* Usage: <code>Exclamation[Local|RemoteByClient|RemoteBySubmitter] <text path>
- * <result path></code><br/>
+ * <result path></code><br>
* If no parameters are provided, the program is run with default data from {@link WordCountData}.
- * <p/>
- * <p/>
+ * <p>
* This example shows how to:
* <ul>
* <li>construct a regular Storm topology as Flink program</li>
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/excamation/ExclamationWithBolt.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/excamation/ExclamationWithBolt.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/excamation/ExclamationWithBolt.java
index 01ab907..19fe977 100644
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/excamation/ExclamationWithBolt.java
+++ b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/excamation/ExclamationWithBolt.java
@@ -31,17 +31,14 @@ import backtype.storm.utils.Utils;
/**
* Implements the "Exclamation" program that attaches 3+x exclamation marks to every line of a text files in a streaming
- * fashion. The program is constructed as a regular {@link StormTopology}.
- * <p/>
- * <p/>
+ * fashion. The program is constructed as a regular {@link backtype.storm.generated.StormTopology}.
+ * <p>
* The input is a plain text file with lines separated by newline characters.
- * <p/>
- * <p/>
+ * <p>
* Usage:
- * <code>ExclamationWithmBolt <text path> <result path> <number of exclamation marks></code><br/>
+ * <code>ExclamationWithmBolt <text path> <result path> <number of exclamation marks></code><br>
* If no parameters are provided, the program is run with default data from {@link WordCountData} with x=2.
- * <p/>
- * <p/>
+ * <p>
* This example shows how to:
* <ul>
* <li>use a Bolt within a Flink Streaming program</li>
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/excamation/ExclamationWithSpout.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/excamation/ExclamationWithSpout.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/excamation/ExclamationWithSpout.java
index 22938e5..a196995 100644
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/excamation/ExclamationWithSpout.java
+++ b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/excamation/ExclamationWithSpout.java
@@ -32,16 +32,13 @@ import backtype.storm.utils.Utils;
/**
* Implements the "Exclamation" program that attaches six exclamation marks to every line of a text files in a streaming
- * fashion. The program is constructed as a regular {@link StormTopology}.
- * <p/>
- * <p/>
+ * fashion. The program is constructed as a regular {@link backtype.storm.generated.StormTopology}.
+ * <p>
* The input is a plain text file with lines separated by newline characters.
- * <p/>
- * <p/>
- * Usage: <code>ExclamationWithSpout <text path> <result path></code><br/>
+ * <p>
+ * Usage: <code>ExclamationWithSpout <text path> <result path></code><br>
* If no parameters are provided, the program is run with default data from {@link WordCountData}.
- * <p/>
- * <p/>
+ * <p>
* This example shows how to:
* <ul>
* <li>use a Storm spout within a Flink Streaming program</li>
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/split/SpoutSplitExample.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/split/SpoutSplitExample.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/split/SpoutSplitExample.java
index 560fe51..03c87f4 100644
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/split/SpoutSplitExample.java
+++ b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/split/SpoutSplitExample.java
@@ -33,14 +33,14 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
/**
* Implements a simple example with two declared output streams for the embedded spout.
- * <p/>
+ * <p>
* This example shows how to:
* <ul>
* <li>handle multiple output stream of a spout</li>
* <li>accessing each stream by .split(...) and .select(...)</li>
* <li>strip wrapper data type SplitStreamType for further processing in Flink</li>
* </ul>
- * <p/>
+ * <p>
* This example would work the same way for multiple bolt output streams.
*/
public class SpoutSplitExample {
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCount.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCount.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCount.java
index aa3a075..6ee7fd9 100644
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCount.java
+++ b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCount.java
@@ -30,15 +30,12 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
/**
* Implements the "WordCount" program that computes a simple word occurrence histogram over text files in a streaming
* fashion. The tokenizer step is performed by a {@link IRichBolt Bolt}.
- * <p/>
- * <p/>
+ * <p>
* The input is a plain text file with lines separated by newline characters.
- * <p/>
- * <p/>
+ * <p>
* Usage: <code>WordCount <text path> <result path></code><br>
* If no parameters are provided, the program is run with default data from {@link WordCountData}.
- * <p/>
- * <p/>
+ * <p>
* This example shows how to:
* <ul>
* <li>use a Bolt within a Flink Streaming program.</li>
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountPojo.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountPojo.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountPojo.java
index f72acb3..9bdcead 100644
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountPojo.java
+++ b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountPojo.java
@@ -36,15 +36,12 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
* Implements the "WordCount" program that computes a simple word occurrence histogram over text files in a streaming
* fashion. The tokenizer step is performed by a {@link IRichBolt Bolt}. In contrast to {@link BoltTokenizerWordCount}
* the tokenizer's input is a POJO type and the single field is accessed by name.
- * <p/>
- * <p/>
+ * <p>
* The input is a plain text file with lines separated by newline characters.
- * <p/>
- * <p/>
+ * <p>
* Usage: <code>WordCount <text path> <result path></code><br>
* If no parameters are provided, the program is run with default data from {@link WordCountData}.
- * <p/>
- * <p/>
+ * <p>
* This example shows how to:
* <ul>
* <li>how to access attributes by name within a Bolt for POJO type input streams
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountWithNames.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountWithNames.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountWithNames.java
index 7617e95..019f1bc 100644
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountWithNames.java
+++ b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountWithNames.java
@@ -38,15 +38,12 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
* Implements the "WordCount" program that computes a simple word occurrence histogram over text files in a streaming
* fashion. The tokenizer step is performed by a {@link IRichBolt Bolt}. In contrast to {@link BoltTokenizerWordCount}
* the tokenizer's input is a {@link Tuple} type and the single field is accessed by name.
- * <p/>
- * <p/>
+ * <p>
* The input is a plain text file with lines separated by newline characters.
- * <p/>
- * <p/>
+ * <p>
* Usage: <code>WordCount <text path> <result path></code><br>
* If no parameters are provided, the program is run with default data from {@link WordCountData}.
- * <p/>
- * <p/>
+ * <p>
* This example shows how to:
* <ul>
* <li>how to access attributes by name within a Bolt for {@link Tuple} type input streams
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/SpoutSourceWordCount.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/SpoutSourceWordCount.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/SpoutSourceWordCount.java
index bb451fe..281780e 100644
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/SpoutSourceWordCount.java
+++ b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/SpoutSourceWordCount.java
@@ -34,15 +34,12 @@ import org.apache.flink.util.Collector;
/**
* Implements the "WordCount" program that computes a simple word occurrence histogram over text files in a streaming
* fashion. The used data source is a {@link IRichSpout Spout}.
- * <p/>
- * <p/>
+ * <p>
* The input is a plain text file with lines separated by newline characters.
- * <p/>
- * <p/>
+ * <p>
* Usage: <code>WordCount <text path> <result path></code><br>
* If no parameters are provided, the program is run with default data from {@link WordCountData}.
- * <p/>
- * <p/>
+ * <p>
* This example shows how to:
* <ul>
* <li>use a Spout within a Flink Streaming program.</li>
@@ -89,7 +86,7 @@ public class SpoutSourceWordCount {
/**
* Implements the string tokenizer that splits sentences into words as a user-defined FlatMapFunction. The function
- * takes a line (String) and splits it into multiple pairs in the form of "(word,1)" (Tuple2<String, Integer>).
+ * takes a line (String) and splits it into multiple pairs in the form of "(word,1)" ({@code Tuple2<String, Integer>}).
*/
public static final class Tokenizer implements FlatMapFunction<String, Tuple2<String, Integer>> {
private static final long serialVersionUID = 1L;
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountLocal.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountLocal.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountLocal.java
index 18f49c1..0a7dfa0 100644
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountLocal.java
+++ b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountLocal.java
@@ -29,18 +29,15 @@ import org.apache.flink.storm.api.FlinkTopologyBuilder;
* Implements the "WordCount" program that computes a simple word occurrence histogram over text files in a streaming
* fashion. The program is constructed as a regular {@link StormTopology} and submitted to Flink for execution in the
* same way as to a Storm {@link LocalCluster}.
- * <p/>
+ * <p>
* This example shows how to run program directly within Java, thus it cannot be used to submit a {@link StormTopology}
* via Flink command line clients (ie, bin/flink).
- * <p/>
- * <p/>
+ * <p>
* The input is a plain text file with lines separated by newline characters.
- * <p/>
- * <p/>
+ * <p>
* Usage: <code>WordCountLocal <text path> <result path></code><br>
* If no parameters are provided, the program is run with default data from {@link WordCountData}.
- * <p/>
- * <p/>
+ * <p>
* This example shows how to:
* <ul>
* <li>run a regular Storm program locally on Flink</li>
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountLocalByName.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountLocalByName.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountLocalByName.java
index 71a5e8d..67f4bbe 100644
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountLocalByName.java
+++ b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountLocalByName.java
@@ -30,18 +30,15 @@ import org.apache.flink.storm.api.FlinkTopologyBuilder;
* fashion. The program is constructed as a regular {@link StormTopology} and submitted to Flink for execution in the
* same way as to a Storm {@link LocalCluster}. In contrast to {@link WordCountLocal} all bolts access the field of
* input tuples by name instead of index.
- * <p/>
+ * <p>
* This example shows how to run program directly within Java, thus it cannot be used to submit a {@link StormTopology}
* via Flink command line clients (ie, bin/flink).
- * <p/>
- * <p/>
+ * <p>
* The input is a plain text file with lines separated by newline characters.
- * <p/>
- * <p/>
+ * <p>
* Usage: <code>WordCountLocalByName <text path> <result path></code><br>
* If no parameters are provided, the program is run with default data from {@link WordCountData}.
- * <p/>
- * <p/>
+ * <p>
* This example shows how to:
* <ul>
* <li>run a regular Storm program locally on Flink
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountRemoteByClient.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountRemoteByClient.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountRemoteByClient.java
index 2e4fb03..8cb7cdd 100644
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountRemoteByClient.java
+++ b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountRemoteByClient.java
@@ -33,18 +33,15 @@ import org.apache.flink.storm.api.FlinkTopologyBuilder;
* Implements the "WordCount" program that computes a simple word occurrence histogram over text files in a streaming
* fashion. The program is constructed as a regular {@link StormTopology} and submitted to Flink for execution in the
* same way as to a Storm cluster similar to {@link NimbusClient}. The Flink cluster can be local or remote.
- * <p/>
+ * <p>
* This example shows how to submit the program via Java, thus it cannot be used to submit a {@link StormTopology} via
* Flink command line clients (ie, bin/flink).
- * <p/>
- * <p/>
+ * <p>
* The input is a plain text file with lines separated by newline characters.
- * <p/>
- * <p/>
+ * <p>
* Usage: <code>WordCountRemoteByClient <text path> <result path></code><br>
* If no parameters are provided, the program is run with default data from {@link WordCountData}.
- * <p/>
- * <p/>
+ * <p>
* This example shows how to:
* <ul>
* <li>submit a regular Storm program to a local or remote Flink cluster.</li>
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountRemoteBySubmitter.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountRemoteBySubmitter.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountRemoteBySubmitter.java
index 173074c..a20843e 100644
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountRemoteBySubmitter.java
+++ b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountRemoteBySubmitter.java
@@ -30,17 +30,14 @@ import org.apache.flink.storm.api.FlinkTopologyBuilder;
* Implements the "WordCount" program that computes a simple word occurrence histogram over text files in a streaming
* fashion. The program is constructed as a regular {@link StormTopology} and submitted to Flink for execution in the
* same way as to a Storm cluster similar to {@link StormSubmitter}. The Flink cluster can be local or remote.
- * <p/>
+ * <p>
* This example shows how to submit the program via Java as well as Flink's command line client (ie, bin/flink).
- * <p/>
- * <p/>
+ * <p>
* The input is a plain text file with lines separated by newline characters.
- * <p/>
- * <p/>
+ * <p>
* Usage: <code>WordCountRemoteBySubmitter <text path> <result path></code><br>
* If no parameters are provided, the program is run with default data from {@link WordCountData}.
- * <p/>
- * <p/>
+ * <p>
* This example shows how to:
* <ul>
* <li>submit a regular Storm program to a local or remote Flink cluster.</li>
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountTopology.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountTopology.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountTopology.java
index 8ee374d..138df65 100644
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountTopology.java
+++ b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/WordCountTopology.java
@@ -36,16 +36,13 @@ import org.apache.flink.storm.wordcount.operators.WordCountInMemorySpout;
/**
* Implements the "WordCount" program that computes a simple word occurrence histogram over text files in a streaming
* fashion. The program is constructed as a regular {@link StormTopology}.
- * <p/>
- * <p/>
+ * <p>
* The input is a plain text file with lines separated by newline characters.
- * <p/>
- * <p/>
+ * <p>
* Usage:
* <code>WordCount[Local|LocalByName|RemoteByClient|RemoteBySubmitter] <text path> <result path></code><br>
* If no parameters are provided, the program is run with default data from {@link WordCountData}.
- * <p/>
- * <p/>
+ * <p>
* This example shows how to:
* <ul>
* <li>how to construct a regular Storm topology as Flink program</li>
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/api/FlinkClient.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/api/FlinkClient.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/api/FlinkClient.java
index 2be7599..3607fad 100644
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/api/FlinkClient.java
+++ b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/api/FlinkClient.java
@@ -138,7 +138,7 @@ public class FlinkClient {
/**
* Return a reference to itself.
- * <p/>
+ * <p>
* {@link FlinkClient} mimics both, {@link NimbusClient} and {@link Nimbus}{@code .Client}, at once.
*
* @return A reference to itself.
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/api/FlinkTopologyBuilder.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/api/FlinkTopologyBuilder.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/api/FlinkTopologyBuilder.java
index 8a88eac..47aa68e 100644
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/api/FlinkTopologyBuilder.java
+++ b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/api/FlinkTopologyBuilder.java
@@ -55,8 +55,8 @@ import java.util.Set;
/**
* {@link FlinkTopologyBuilder} mimics a {@link TopologyBuilder}, but builds a Flink program instead of a Storm
* topology. Most methods (except {@link #createTopology()} are copied from the original {@link TopologyBuilder}
- * implementation to ensure equal behavior.<br />
- * <br />
+ * implementation to ensure equal behavior.<br>
+ * <br>
* <strong>CAUTION: {@link IRichStateSpout StateSpout}s are currently not supported.</strong>
*/
public class FlinkTopologyBuilder {
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/SplitStreamType.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/SplitStreamType.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/SplitStreamType.java
index a4b5f8e..5056795 100644
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/SplitStreamType.java
+++ b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/SplitStreamType.java
@@ -20,10 +20,11 @@ package org.apache.flink.storm.util;
import org.apache.flink.streaming.api.datastream.DataStream;
/**
- * Used by {@link org.apache.flink.storm.wrappers.AbstractStormCollector AbstractStormCollector} to wrap
+ * Used by org.apache.flink.storm.wrappers.AbstractStormCollector to wrap
* output tuples if multiple output streams are declared. For this case, the Flink output data stream must be split via
* {@link DataStream#split(org.apache.flink.streaming.api.collector.selector.OutputSelector) .split(...)} using
* {@link StormStreamSelector}.
+ *
*/
public class SplitStreamType<T> {
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/BoltWrapper.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/BoltWrapper.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/BoltWrapper.java
index 12d967a..d9a2f91 100644
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/BoltWrapper.java
+++ b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/BoltWrapper.java
@@ -42,8 +42,8 @@ import com.google.common.collect.Sets;
* A {@link BoltWrapper} wraps an {@link IRichBolt} in order to execute the Storm bolt within a Flink Streaming
* program. It takes the Flink input tuples of type {@code IN} and transforms them into {@link StormTuple}s that the
* bolt can process. Furthermore, it takes the bolt's output tuples and transforms them into Flink tuples of type
- * {@code OUT} (see {@link AbstractStormCollector} for supported types).<br />
- * <br />
+ * {@code OUT} (see {@link AbstractStormCollector} for supported types).<br>
+ * <br>
* <strong>CAUTION: currently, only simple bolts are supported! (ie, bolts that do not use the Storm configuration
* <code>Map</code> or <code>TopologyContext</code> that is provided by the bolt's <code>open(..)</code> method.
* Furthermore, acking and failing of tuples as well as accessing tuple attributes by field names is not supported so
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/SpoutWrapper.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/SpoutWrapper.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/SpoutWrapper.java
index e78dd5c..62b36be 100644
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/SpoutWrapper.java
+++ b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/SpoutWrapper.java
@@ -38,15 +38,15 @@ import com.google.common.collect.Sets;
/**
* A {@link SpoutWrapper} wraps an {@link IRichSpout} in order to execute it within a Flink Streaming program. It
* takes the spout's output tuples and transforms them into Flink tuples of type {@code OUT} (see
- * {@link SpoutCollector} for supported types).<br />
- * <br />
+ * {@link SpoutCollector} for supported types).<br>
+ * <br>
* Per default, {@link SpoutWrapper} calls the wrapped spout's {@link IRichSpout#nextTuple() nextTuple()} method in
- * an infinite loop.<br />
+ * an infinite loop.<br>
* Alternatively, {@link SpoutWrapper} can call {@link IRichSpout#nextTuple() nextTuple()} for a finite number of
* times and terminate automatically afterwards (for finite input streams). The number of {@code nextTuple()} calls can
* be specified as a certain number of invocations or can be undefined. In the undefined case, {@link SpoutWrapper}
* terminates if no record was emitted to the output collector for the first time during a call to
- * {@link IRichSpout#nextTuple() nextTuple()}.<br />
+ * {@link IRichSpout#nextTuple() nextTuple()}.<br>
* If the given spout implements {@link FiniteSpout} interface and {@link #numberOfInvocations} is not provided or
* is {@code null}, {@link SpoutWrapper} calls {@link IRichSpout#nextTuple() nextTuple()} method until
* {@link FiniteSpout#reachedEnd()} returns true.
@@ -258,7 +258,7 @@ public final class SpoutWrapper<OUT> extends RichParallelSourceFunction<OUT> {
/**
* {@inheritDoc}
- * <p/>
+ * <p>
* Sets the {@link #isRunning} flag to {@code false}.
*/
@Override
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/Entities.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/Entities.java b/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/Entities.java
index d88ea34..8a8f91e 100755
--- a/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/Entities.java
+++ b/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/Entities.java
@@ -22,7 +22,7 @@ import java.util.List;
/**
* Entities which have been parsed out of the text of the
- * {@link package org.apache.flink.contrib.tweetinputformat.model.tweet.Tweet}.
+ * {@link org.apache.flink.contrib.tweetinputformat.model.tweet.Tweet}.
*/
public class Entities {
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/HashTags.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/HashTags.java b/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/HashTags.java
index 1900859..ba6f5c0 100755
--- a/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/HashTags.java
+++ b/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/HashTags.java
@@ -19,7 +19,7 @@ package org.apache.flink.contrib.tweetinputformat.model.tweet.entities;
/**
* Represents hashtags which have been parsed out of the
- * {@link package org.apache.flink.contrib.tweetinputformat.model.tweet.Tweet} text.
+ * {@link org.apache.flink.contrib.tweetinputformat.model.tweet.Tweet} text.
*/
public class HashTags {
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/Media.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/Media.java b/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/Media.java
index f006aac..37d2ab8 100755
--- a/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/Media.java
+++ b/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/Media.java
@@ -21,7 +21,7 @@ import java.util.HashMap;
import java.util.Map;
/**
- * Represents media elements uploaded with the {@link package org.apache.flink.contrib.tweetinputformat.model.tweet.Tweet}.
+ * Represents media elements uploaded with the {@link org.apache.flink.contrib.tweetinputformat.model.tweet.Tweet}.
*/
public class Media {
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/Symbol.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/Symbol.java b/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/Symbol.java
index db562e8..c2a6dd0 100755
--- a/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/Symbol.java
+++ b/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/Symbol.java
@@ -19,7 +19,7 @@ package org.apache.flink.contrib.tweetinputformat.model.tweet.entities;
/**
* An array of financial symbols starting with the dollar sign extracted from the
- * {@link package org.apache.flink.contrib.tweetinputformat.model.tweet.Tweet} text.
+ * {@link org.apache.flink.contrib.tweetinputformat.model.tweet.Tweet} text.
*/
public class Symbol {
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/URL.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/URL.java b/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/URL.java
index 6d0f184..93ddebd 100755
--- a/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/URL.java
+++ b/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/URL.java
@@ -19,7 +19,7 @@ package org.apache.flink.contrib.tweetinputformat.model.tweet.entities;
/**
* Represents URLs included in the text of a Tweet or within textual fields of a
- * {@link package org.apache.flink.contrib.tweetinputformat.model.tweet.User.Users} object.
+ * {@link org.apache.flink.contrib.tweetinputformat.model.tweet.entities.UserMention} object.
*/
public class URL {
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/UserMention.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/UserMention.java b/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/UserMention.java
index a56f7c7..2a3cf1a 100755
--- a/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/UserMention.java
+++ b/flink-contrib/flink-tweet-inputformat/src/main/java/org/apache/flink/contrib/tweetinputformat/model/tweet/entities/UserMention.java
@@ -19,7 +19,7 @@ package org.apache.flink.contrib.tweetinputformat.model.tweet.entities;
/**
* Represents other Twitter users mentioned in the text of the
- * {@link package org.apache.flink.contrib.tweetinputformat.model.tweet.Tweet}.
+ * {@link org.apache.flink.contrib.tweetinputformat.model.tweet.Tweet}.
*/
public class UserMention {
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java b/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java
index f38ecb0..b9ebaf7 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/ExecutionConfig.java
@@ -677,7 +677,7 @@ public class ExecutionConfig implements Serializable {
private static final long serialVersionUID = 1L;
/**
- * Convert UserConfig into a Map<String, String> representation.
+ * Convert UserConfig into a {@code Map<String, String>} representation.
* This can be used by the runtime, for example for presenting the user config in the web frontend.
*
* @return Key/Value representation of the UserConfig, or null.
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/api/common/ExecutionMode.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/ExecutionMode.java b/flink-core/src/main/java/org/apache/flink/api/common/ExecutionMode.java
index f3e958e..b1463cc 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/ExecutionMode.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/ExecutionMode.java
@@ -33,10 +33,10 @@ public enum ExecutionMode {
* pipelined manner) are data flows that branch (one data set consumed by multiple
* operations) and re-join later:
* <pre>{@code
- * DataSet data = ...;
- * DataSet mapped1 = data.map(new MyMapper());
- * DataSet mapped2 = data.map(new AnotherMapper());
- * mapped1.join(mapped2).where(...).equalTo(...);
+ * DataSet data = ...;
+ * DataSet mapped1 = data.map(new MyMapper());
+ * DataSet mapped2 = data.map(new AnotherMapper());
+ * mapped1.join(mapped2).where(...).equalTo(...);
* }</pre>
*/
PIPELINED,
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/api/common/accumulators/Histogram.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/accumulators/Histogram.java b/flink-core/src/main/java/org/apache/flink/api/common/accumulators/Histogram.java
index f5e959a..f587fee 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/accumulators/Histogram.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/accumulators/Histogram.java
@@ -23,7 +23,7 @@ import java.util.TreeMap;
/**
* Histogram accumulator, which builds a histogram in a distributed manner.
- * Implemented as a Integer->Integer TreeMap, so that the entries are sorted
+ * Implemented as a Integer-&gt;Integer TreeMap, so that the entries are sorted
* according to the values.
*
* This class does not extend to continuous values later, because it makes no
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/api/common/aggregators/Aggregator.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/aggregators/Aggregator.java b/flink-core/src/main/java/org/apache/flink/api/common/aggregators/Aggregator.java
index 035b591..aabb82f 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/aggregators/Aggregator.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/aggregators/Aggregator.java
@@ -47,7 +47,7 @@ import org.apache.flink.types.Value;
* }
*
* public boolean filter (Double value) {
- * if (value > 1000000.0) {
+ * if (value &gt; 1000000.0) {
* agg.aggregate(1);
* return false
* }
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/api/common/distributions/DataDistribution.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/distributions/DataDistribution.java b/flink-core/src/main/java/org/apache/flink/api/common/distributions/DataDistribution.java
index 9bf1abf..ebf1319 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/distributions/DataDistribution.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/distributions/DataDistribution.java
@@ -39,7 +39,7 @@ public interface DataDistribution extends IOReadableWritable, Serializable {
* <p>
* Note: The last bucket's upper bound is actually discarded by many algorithms.
* The last bucket is assumed to hold all values <i>v</i> such that
- * {@code v &gt; getBucketBoundary(n-1, n)}, where <i>n</i> is the number of buckets.
+ * {@code v > getBucketBoundary(n-1, n)}, where <i>n</i> is the number of buckets.
*
* @param bucketNum The number of the bucket for which to get the upper bound.
* @param totalNumBuckets The number of buckets to split the data into.
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/api/common/functions/CoGroupFunction.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/functions/CoGroupFunction.java b/flink-core/src/main/java/org/apache/flink/api/common/functions/CoGroupFunction.java
index 2d7a1d7..1098341 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/functions/CoGroupFunction.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/functions/CoGroupFunction.java
@@ -28,12 +28,12 @@ import org.apache.flink.util.Collector;
* If a key is present in only one of the two inputs, it may be that one of the groups is empty.
* <p>
* The basic syntax for using CoGoup on two data sets is as follows:
- * <pre><blockquote>
+ * <pre>{@code
* DataSet<X> set1 = ...;
* DataSet<Y> set2 = ...;
*
* set1.coGroup(set2).where(<key-definition>).equalTo(<key-definition>).with(new MyCoGroupFunction());
- * </blockquote></pre>
+ * }</pre>
* <p>
* {@code set1} is here considered the first input, {@code set2} the second input.
* <p>
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/api/common/functions/CrossFunction.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/functions/CrossFunction.java b/flink-core/src/main/java/org/apache/flink/api/common/functions/CrossFunction.java
index 0e9d2a5..3660e63 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/functions/CrossFunction.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/functions/CrossFunction.java
@@ -28,12 +28,12 @@ import java.io.Serializable;
* pair of elements, instead of processing 2-tuples that contain the pairs.
* <p>
* The basic syntax for using Cross on two data sets is as follows:
- * <pre><blockquote>
+ * <pre>{@code
* DataSet<X> set1 = ...;
* DataSet<Y> set2 = ...;
*
* set1.cross(set2).with(new MyCrossFunction());
- * </blockquote></pre>
+ * }</pre>
* <p>
* {@code set1} is here considered the first input, {@code set2} the second input.
*
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/api/common/functions/FilterFunction.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/functions/FilterFunction.java b/flink-core/src/main/java/org/apache/flink/api/common/functions/FilterFunction.java
index 1f21a96..1a80cb8 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/functions/FilterFunction.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/functions/FilterFunction.java
@@ -25,11 +25,11 @@ import java.io.Serializable;
* The predicate decides whether to keep the element, or to discard it.
* <p>
* The basic syntax for using a FilterFunction is as follows:
- * <pre><blockquote>
+ * <pre>{@code
* DataSet<X> input = ...;
*
* DataSet<X> result = input.filter(new MyFilterFunction());
- * </blockquote></pre>
+ * }</pre>
* <p>
* <strong>IMPORTANT:</strong> The system assumes that the function does not
* modify the elements on which the predicate is applied. Violating this assumption
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/api/common/functions/FlatJoinFunction.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/functions/FlatJoinFunction.java b/flink-core/src/main/java/org/apache/flink/api/common/functions/FlatJoinFunction.java
index f99aec4..c79046b 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/functions/FlatJoinFunction.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/functions/FlatJoinFunction.java
@@ -34,12 +34,12 @@ import java.io.Serializable;
* if their key is not contained in the other data set.
* <p>
* The basic syntax for using Join on two data sets is as follows:
- * <pre><blockquote>
+ * <pre>{@code
* DataSet<X> set1 = ...;
* DataSet<Y> set2 = ...;
*
* set1.join(set2).where(<key-definition>).equalTo(<key-definition>).with(new MyJoinFunction());
- * </blockquote></pre>
+ * }</pre>
* <p>
* {@code set1} is here considered the first input, {@code set2} the second input.
* <p>
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/api/common/functions/FlatMapFunction.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/functions/FlatMapFunction.java b/flink-core/src/main/java/org/apache/flink/api/common/functions/FlatMapFunction.java
index 37c6e83..5b955f7 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/functions/FlatMapFunction.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/functions/FlatMapFunction.java
@@ -29,11 +29,11 @@ import java.io.Serializable;
* use the {@link MapFunction}.
* <p>
* The basic syntax for using a FlatMapFunction is as follows:
- * <pre><blockquote>
+ * <pre>{@code
* DataSet<X> input = ...;
*
* DataSet<Y> result = input.flatMap(new MyFlatMapFunction());
- * </blockquote></pre>
+ * }</pre>
*
* @param <T> Type of the input elements.
* @param <O> Type of the returned elements.
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/api/common/functions/FoldFunction.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/functions/FoldFunction.java b/flink-core/src/main/java/org/apache/flink/api/common/functions/FoldFunction.java
index a9c5b2b..c4be00b 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/functions/FoldFunction.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/functions/FoldFunction.java
@@ -25,12 +25,12 @@ import java.io.Serializable;
* a single value, by applying a binary operation to an initial accumulator element every element from a group elements.
* <p>
* The basic syntax for using a FoldFunction is as follows:
- * <pre><blockquote>
+ * <pre>{@code
* DataSet<X> input = ...;
*
* X initialValue = ...;
* DataSet<X> result = input.fold(new MyFoldFunction(), initialValue);
- * </blockquote></pre>
+ * }</pre>
* <p>
* Like all functions, the FoldFunction needs to be serializable, as defined in {@link java.io.Serializable}.
*
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/api/common/functions/GroupReduceFunction.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/functions/GroupReduceFunction.java b/flink-core/src/main/java/org/apache/flink/api/common/functions/GroupReduceFunction.java
index befbee3..36500f6 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/functions/GroupReduceFunction.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/functions/GroupReduceFunction.java
@@ -32,11 +32,11 @@ import org.apache.flink.util.Collector;
* {@link ReduceFunction}.
* <p>
* The basic syntax for using a grouped GroupReduceFunction is as follows:
- * <pre><blockquote>
+ * <pre>{@code
* DataSet<X> input = ...;
*
* DataSet<X> result = input.groupBy(<key-definition>).reduceGroup(new MyGroupReduceFunction());
- * </blockquote></pre>
+ * }</pre>
*
* @param <T> Type of the elements that this function processes.
* @param <O> The type of the elements returned by the user-defined function.
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/api/common/functions/JoinFunction.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/functions/JoinFunction.java b/flink-core/src/main/java/org/apache/flink/api/common/functions/JoinFunction.java
index 085bae9..0032a0f 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/functions/JoinFunction.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/functions/JoinFunction.java
@@ -29,12 +29,12 @@ import java.io.Serializable;
* if their key is not contained in the other data set.
* <p>
* The basic syntax for using Join on two data sets is as follows:
- * <pre><blockquote>
+ * <pre>{@code
* DataSet<X> set1 = ...;
* DataSet<Y> set2 = ...;
*
* set1.join(set2).where(<key-definition>).equalTo(<key-definition>).with(new MyJoinFunction());
- * </blockquote></pre>
+ * }</pre>
* <p>
* {@code set1} is here considered the first input, {@code set2} the second input.
* <p>
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/api/common/functions/MapFunction.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/functions/MapFunction.java b/flink-core/src/main/java/org/apache/flink/api/common/functions/MapFunction.java
index bbeacc6..45f3d1f 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/functions/MapFunction.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/functions/MapFunction.java
@@ -28,11 +28,11 @@ import java.io.Serializable;
* using the {@link FlatMapFunction}.
* <p>
* The basic syntax for using a MapFunction is as follows:
- * <pre><blockquote>
+ * <pre>{@code
* DataSet<X> input = ...;
*
* DataSet<Y> result = input.map(new MyMapFunction());
- * </blockquote></pre>
+ * }</pre>
*
* @param <T> Type of the input elements.
* @param <O> Type of the returned elements.
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/api/common/functions/MapPartitionFunction.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/functions/MapPartitionFunction.java b/flink-core/src/main/java/org/apache/flink/api/common/functions/MapPartitionFunction.java
index 9011d25..d2c0077 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/functions/MapPartitionFunction.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/functions/MapPartitionFunction.java
@@ -31,11 +31,11 @@ import java.io.Serializable;
* For most of the simple use cases, consider using the {@link MapFunction} or {@link FlatMapFunction}.
* <p>
* The basic syntax for a MapPartitionFunction is as follows:
- * <pre><blockquote>
+ * <pre>{@code
* DataSet<X> input = ...;
*
* DataSet<Y> result = input.mapPartition(new MyMapPartitionFunction());
- * </blockquote></pre>
+ * }</pre>
*
* @param <T> Type of the input elements.
* @param <O> Type of the returned elements.
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/api/common/functions/ReduceFunction.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/functions/ReduceFunction.java b/flink-core/src/main/java/org/apache/flink/api/common/functions/ReduceFunction.java
index 8de7861..1b34047 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/functions/ReduceFunction.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/functions/ReduceFunction.java
@@ -32,11 +32,11 @@ import java.io.Serializable;
* execution strategies.
* <p>
* The basic syntax for using a grouped ReduceFunction is as follows:
- * <pre><blockquote>
+ * <pre>{@code
* DataSet<X> input = ...;
*
* DataSet<X> result = input.groupBy(<key-definition>).reduce(new MyReduceFunction());
- * </blockquote></pre>
+ * }</pre>
* <p>
* Like all functions, the ReduceFunction needs to be serializable, as defined in {@link java.io.Serializable}.
*
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/api/common/functions/RichGroupReduceFunction.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/functions/RichGroupReduceFunction.java b/flink-core/src/main/java/org/apache/flink/api/common/functions/RichGroupReduceFunction.java
index 48e27d3..b5edc64 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/functions/RichGroupReduceFunction.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/functions/RichGroupReduceFunction.java
@@ -49,7 +49,7 @@ public abstract class RichGroupReduceFunction<IN, OUT> extends AbstractRichFunct
* <p>
* This method is only ever invoked when the subclass of {@link RichGroupReduceFunction}
* adds the {@link Combinable} annotation, or if the <i>combinable</i> flag is set when defining
- * the <i>reduceGroup<i> operation via
+ * the <i>reduceGroup</i> operation via
* org.apache.flink.api.java.operators.GroupReduceOperator#setCombinable(boolean).
* <p>
* Since the reduce function will be called on the result of this method, it is important that this
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/api/common/io/FileOutputFormat.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/io/FileOutputFormat.java b/flink-core/src/main/java/org/apache/flink/api/common/io/FileOutputFormat.java
index 6854268..219877d 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/io/FileOutputFormat.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/io/FileOutputFormat.java
@@ -51,7 +51,7 @@ public abstract class FileOutputFormat<IT> extends RichOutputFormat<IT> implemen
/** A directory is always created, regardless of number of write tasks. */
ALWAYS,
- /** A directory is only created for parallel output tasks, i.e., number of output tasks > 1.
+ /** A directory is only created for parallel output tasks, i.e., number of output tasks &gt; 1.
* If number of output tasks = 1, the output is written to a single file. */
PARONLY
}
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/api/common/operators/AbstractUdfOperator.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/operators/AbstractUdfOperator.java b/flink-core/src/main/java/org/apache/flink/api/common/operators/AbstractUdfOperator.java
index 1c79a37..74b0d01 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/operators/AbstractUdfOperator.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/operators/AbstractUdfOperator.java
@@ -105,7 +105,7 @@ public abstract class AbstractUdfOperator<OUT, FT extends Function> extends Oper
* Clears all previous broadcast inputs and binds the given inputs as
* broadcast variables of this operator.
*
- * @param inputs The <name, root> pairs to be set as broadcast inputs.
+ * @param inputs The {@code<name, root>} pairs to be set as broadcast inputs.
*/
public <T> void setBroadcastVariables(Map<String, Operator<T>> inputs) {
this.broadcastInputs.clear();
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/api/common/typeutils/TypeComparator.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/typeutils/TypeComparator.java b/flink-core/src/main/java/org/apache/flink/api/common/typeutils/TypeComparator.java
index 3a545e4..cd5da84 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/typeutils/TypeComparator.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/typeutils/TypeComparator.java
@@ -76,7 +76,7 @@ public abstract class TypeComparator<T> implements Serializable {
* of the fields from the record, this method may extract those fields.
* <p>
* A typical example for checking the equality of two elements is the following:
- * <pre>
+ * <pre>{@code
* E e1 = ...;
* E e2 = ...;
*
@@ -84,7 +84,7 @@ public abstract class TypeComparator<T> implements Serializable {
*
* acc.setReference(e1);
* boolean equal = acc.equalToReference(e2);
- * </pre>
+ * }</pre>
*
* The rational behind this method is that elements are typically compared using certain features that
* are extracted from them, (such de-serializing as a subset of fields). When setting the
@@ -113,7 +113,7 @@ public abstract class TypeComparator<T> implements Serializable {
* elements {@code e1} and {@code e2} via a comparator, this method can be used the
* following way.
*
- * <pre>
+ * <pre>{@code
* E e1 = ...;
* E e2 = ...;
*
@@ -124,7 +124,7 @@ public abstract class TypeComparator<T> implements Serializable {
* acc2.setReference(e2);
*
* int comp = acc1.compareToReference(acc2);
- * </pre>
+ * }</pre>
*
* The rational behind this method is that elements are typically compared using certain features that
* are extracted from them, (such de-serializing as a subset of fields). When setting the
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/configuration/ConfigConstants.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/configuration/ConfigConstants.java b/flink-core/src/main/java/org/apache/flink/configuration/ConfigConstants.java
index b64939e..d331548 100644
--- a/flink-core/src/main/java/org/apache/flink/configuration/ConfigConstants.java
+++ b/flink-core/src/main/java/org/apache/flink/configuration/ConfigConstants.java
@@ -600,7 +600,7 @@ public final class ConfigConstants {
public static final boolean DEFAULT_FILESYSTEM_OVERWRITE = false;
/**
- * The default behavior for output directory creating (create only directory when parallelism > 1).
+ * The default behavior for output directory creating (create only directory when parallelism &gt; 1).
*/
public static final boolean DEFAULT_FILESYSTEM_ALWAYS_CREATE_DIRECTORY = false;
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java b/flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java
index 185b5f2..7dc92c6 100644
--- a/flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java
+++ b/flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java
@@ -162,8 +162,6 @@ public abstract class FileSystem {
*
* @return a reference to the {@link FileSystem} instance for accessing the
* local file system.
- * @throws IOException
- * thrown if a reference to the file system instance could not be obtained
*/
public static FileSystem getLocalFileSystem() {
// this should really never fail.
@@ -485,20 +483,20 @@ public abstract class FileSystem {
/**
* Initializes output directories on local file systems according to the given write mode.
*
- * WriteMode.CREATE & parallel output:
+ * WriteMode.CREATE &amp; parallel output:
* - A directory is created if the output path does not exist.
* - An existing directory is reused, files contained in the directory are NOT deleted.
* - An existing file raises an exception.
*
- * WriteMode.CREATE & NONE parallel output:
+ * WriteMode.CREATE &amp; NONE parallel output:
* - An existing file or directory raises an exception.
*
- * WriteMode.OVERWRITE & parallel output:
+ * WriteMode.OVERWRITE &amp; parallel output:
* - A directory is created if the output path does not exist.
* - An existing directory is reused, files contained in the directory are NOT deleted.
* - An existing file is deleted and replaced by a new directory.
*
- * WriteMode.OVERWRITE & NONE parallel output:
+ * WriteMode.OVERWRITE &amp; NONE parallel output:
* - An existing file or directory (and all its content) is deleted
*
* Files contained in an existing directory are not deleted, because multiple instances of a
@@ -646,19 +644,19 @@ public abstract class FileSystem {
/**
* Initializes output directories on distributed file systems according to the given write mode.
*
- * WriteMode.CREATE & parallel output:
+ * WriteMode.CREATE &amp; parallel output:
* - A directory is created if the output path does not exist.
* - An existing file or directory raises an exception.
*
- * WriteMode.CREATE & NONE parallel output:
+ * WriteMode.CREATE &amp; NONE parallel output:
* - An existing file or directory raises an exception.
*
- * WriteMode.OVERWRITE & parallel output:
+ * WriteMode.OVERWRITE &amp; parallel output:
* - A directory is created if the output path does not exist.
* - An existing directory and its content is deleted and a new directory is created.
* - An existing file is deleted and replaced by a new directory.
*
- * WriteMode.OVERWRITE & NONE parallel output:
+ * WriteMode.OVERWRITE &amp; NONE parallel output:
* - An existing file or directory is deleted and replaced by a new directory.
*
* @param outPath Output path that should be prepared.
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/core/memory/MemorySegment.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/core/memory/MemorySegment.java b/flink-core/src/main/java/org/apache/flink/core/memory/MemorySegment.java
index 31d5563..8d13921 100644
--- a/flink-core/src/main/java/org/apache/flink/core/memory/MemorySegment.java
+++ b/flink-core/src/main/java/org/apache/flink/core/memory/MemorySegment.java
@@ -210,7 +210,7 @@ public abstract class MemorySegment {
}
/**
- * Wraps the chunk of the underlying memory located between <tt>offset<tt> and
+ * Wraps the chunk of the underlying memory located between <tt>offset</tt> and
* <tt>length</tt> in a NIO ByteBuffer.
*
* @param offset The offset in the memory segment.
@@ -1220,7 +1220,7 @@ public abstract class MemorySegment {
* @param offset2 Offset of seg2 to start comparing
* @param len Length of the compared memory region
*
- * @return 0 if equal, -1 if seg1 < seg2, 1 otherwise
+ * @return 0 if equal, -1 if seg1 &lt; seg2, 1 otherwise
*/
public final int compare(MemorySegment seg2, int offset1, int offset2, int len) {
while (len >= 8) {
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/types/Record.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/types/Record.java b/flink-core/src/main/java/org/apache/flink/types/Record.java
index 24ff979..8ef972f 100644
--- a/flink-core/src/main/java/org/apache/flink/types/Record.java
+++ b/flink-core/src/main/java/org/apache/flink/types/Record.java
@@ -803,7 +803,7 @@ public final class Record implements Value, CopyableValue<Record> {
* Bin-copies fields from a source record to this record. The following caveats apply:
*
* If the source field is in a modified state, no binary representation will exist yet.
- * In that case, this method is equivalent to setField(..., source.getField(..., <class>)).
+ * In that case, this method is equivalent to {@code setField(..., source.getField(..., <class>))}.
* In particular, if setValue is called on the source field Value instance, that change
* will propagate to this record.
*
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/types/StringValue.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/types/StringValue.java b/flink-core/src/main/java/org/apache/flink/types/StringValue.java
index 2249019..873e1dd 100644
--- a/flink-core/src/main/java/org/apache/flink/types/StringValue.java
+++ b/flink-core/src/main/java/org/apache/flink/types/StringValue.java
@@ -377,7 +377,7 @@ public class StringValue implements NormalizableKey<StringValue>, CharSequence,
* @param prefix The prefix character sequence.
* @param startIndex The position to start checking for the prefix.
*
- * @return True, if this StringValue substring, starting at position <code>startIndex</code> has </code>prefix</code>
+ * @return True, if this StringValue substring, starting at position <code>startIndex</code> has <code>prefix</code>
* as its prefix.
*/
public boolean startsWith(CharSequence prefix, int startIndex) {
@@ -403,7 +403,7 @@ public class StringValue implements NormalizableKey<StringValue>, CharSequence,
*
* @param prefix The prefix character sequence.
*
- * @return True, if this StringValue has </code>prefix</code> as its prefix.
+ * @return True, if this StringValue has <code>prefix</code> as its prefix.
*/
public boolean startsWith(CharSequence prefix) {
return startsWith(prefix, 0);
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-core/src/main/java/org/apache/flink/util/Visitable.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/util/Visitable.java b/flink-core/src/main/java/org/apache/flink/util/Visitable.java
index 49a7866..559cb96 100644
--- a/flink-core/src/main/java/org/apache/flink/util/Visitable.java
+++ b/flink-core/src/main/java/org/apache/flink/util/Visitable.java
@@ -38,7 +38,7 @@ public interface Visitable<T extends Visitable<T>> {
* and then invokes the post-visit method.
* <p>
* A typical code example is the following:
- * <code>
+ * <pre>{@code
* public void accept(Visitor<Operator> visitor) {
* boolean descend = visitor.preVisit(this);
* if (descend) {
@@ -48,7 +48,7 @@ public interface Visitable<T extends Visitable<T>> {
* visitor.postVisit(this);
* }
* }
- * </code>
+ * }</pre>
*
* @param visitor The visitor to be called with this object as the parameter.
*
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/clustering/KMeans.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/clustering/KMeans.java b/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/clustering/KMeans.java
index 73f90ca..2db6f65 100644
--- a/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/clustering/KMeans.java
+++ b/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/clustering/KMeans.java
@@ -198,7 +198,7 @@ public class KMeans {
// USER FUNCTIONS
// *************************************************************************
- /** Converts a Tuple2<Double,Double> into a Point. */
+ /** Converts a {@code Tuple2<Double,Double>} into a Point. */
@ForwardedFields("0->x; 1->y")
public static final class TuplePointConverter implements MapFunction<Tuple2<Double, Double>, Point> {
@@ -208,7 +208,7 @@ public class KMeans {
}
}
- /** Converts a Tuple3<Integer, Double,Double> into a Centroid. */
+ /** Converts a {@code Tuple3<Integer, Double,Double>} into a Centroid. */
@ForwardedFields("0->id; 1->x; 2->y")
public static final class TupleCentroidConverter implements MapFunction<Tuple3<Integer, Double, Double>, Centroid> {
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/clustering/util/KMeansDataGenerator.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/clustering/util/KMeansDataGenerator.java b/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/clustering/util/KMeansDataGenerator.java
index 8f44034..8f48d0a 100644
--- a/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/clustering/util/KMeansDataGenerator.java
+++ b/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/clustering/util/KMeansDataGenerator.java
@@ -66,6 +66,8 @@ public class KMeansDataGenerator {
* <li><b>Optional</b> Double: Value range of cluster centers
* <li><b>Optional</b> Long: Random seed
* </ol>
+ *
+ * @throws IOException
*/
public static void main(String[] args) throws IOException {
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/distcp/DistCp.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/distcp/DistCp.java b/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/distcp/DistCp.java
index 08f90a6..8e87892 100644
--- a/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/distcp/DistCp.java
+++ b/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/distcp/DistCp.java
@@ -51,7 +51,7 @@ import java.util.Map;
* (see <a href="http://hadoop.apache.org/docs/r1.2.1/distcp.html">http://hadoop.apache.org/docs/r1.2.1/distcp.html</a>)
* with a dynamic input format
* Note that this tool does not deal with retriability. Additionally, empty directories are not copied over.
- * <p/>
+ * <p>
* When running locally, local file systems paths can be used.
* However, in a distributed environment HDFS paths must be provided both as input and output.
*/
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/graph/PageRankBasic.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/graph/PageRankBasic.java b/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/graph/PageRankBasic.java
index f05a15d..7b05158 100644
--- a/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/graph/PageRankBasic.java
+++ b/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/graph/PageRankBasic.java
@@ -53,7 +53,7 @@ import org.apache.flink.examples.java.graph.util.PageRankData;
* For example <code>"1\n2\n12\n42\n63"</code> gives five pages with IDs 1, 2, 12, 42, and 63.
* <li>Links are represented as pairs of page IDs which are separated by space
* characters. Links are separated by new-line characters.<br>
- * For example <code>"1 2\n2 12\n1 12\n42 63"</code> gives four (directed) links (1)->(2), (2)->(12), (1)->(12), and (42)->(63).<br>
+ * For example <code>"1 2\n2 12\n1 12\n42 63"</code> gives four (directed) links (1)-&gt;(2), (2)-&gt;(12), (1)-&gt;(12), and (42)-&gt;(63).<br>
* For this simple implementation it is required that each page has at least one incoming and one outgoing link (a page can point to itself).
* </ul>
*
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/ml/LinearRegression.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/ml/LinearRegression.java b/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/ml/LinearRegression.java
index 341daa6..9c3356c 100644
--- a/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/ml/LinearRegression.java
+++ b/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/ml/LinearRegression.java
@@ -182,7 +182,7 @@ public class LinearRegression {
// USER FUNCTIONS
// *************************************************************************
- /** Converts a Tuple2<Double,Double> into a Data. */
+ /** Converts a {@code Tuple2<Double,Double>} into a Data. */
@ForwardedFields("0->x; 1->y")
public static final class TupleDataConverter implements MapFunction<Tuple2<Double, Double>, Data> {
@@ -192,7 +192,7 @@ public class LinearRegression {
}
}
- /** Converts a Tuple2<Double,Double> into a Params. */
+ /** Converts a {@code Tuple2<Double,Double>} into a Params. */
@ForwardedFields("0->theta0; 1->theta1")
public static final class TupleParamsConverter implements MapFunction<Tuple2<Double, Double>,Params> {
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/relational/TPCHQuery10.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/relational/TPCHQuery10.java b/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/relational/TPCHQuery10.java
index 3dc0472..495ff26 100644
--- a/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/relational/TPCHQuery10.java
+++ b/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/relational/TPCHQuery10.java
@@ -39,7 +39,7 @@ import org.apache.flink.api.java.ExecutionEnvironment;
* This program implements the following SQL equivalent:
*
* <p>
- * <code><pre>
+ * <pre>{@code
* SELECT
* c_custkey,
* c_name,
@@ -64,7 +64,7 @@ import org.apache.flink.api.java.ExecutionEnvironment;
* c_acctbal,
* n_name,
* c_address
- * </pre></code>
+ * }</pre>
*
* <p>
* Compared to the original TPC-H query this version does not print
http://git-wip-us.apache.org/repos/asf/flink/blob/680b5a90/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/relational/TPCHQuery3.java
----------------------------------------------------------------------
diff --git a/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/relational/TPCHQuery3.java b/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/relational/TPCHQuery3.java
index 9a6e58c..a9e47f6 100644
--- a/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/relational/TPCHQuery3.java
+++ b/flink-examples/flink-java-examples/src/main/java/org/apache/flink/examples/java/relational/TPCHQuery3.java
@@ -42,7 +42,7 @@ import org.apache.flink.api.java.tuple.Tuple4;
* This program implements the following SQL equivalent:
*
* <p>
- * <code><pre>
+ * <pre>{@code
* SELECT
* l_orderkey,
* SUM(l_extendedprice*(1-l_discount)) AS revenue,
@@ -61,7 +61,7 @@ import org.apache.flink.api.java.tuple.Tuple4;
* l_orderkey,
* o_orderdate,
* o_shippriority;
- * </pre></code>
+ * }</pre>
*
* <p>
* Compared to the original TPC-H query this version does not sort the result by revenue