You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@flink.apache.org by se...@apache.org on 2015/08/11 13:14:19 UTC
[1/4] flink git commit: [FLINK-2357] [web dashboard] Update Node.js
installation instructions
Repository: flink
Updated Branches:
refs/heads/master 4ce9475cd -> f50ae26a2
[FLINK-2357] [web dashboard] Update Node.js installation instructions
This closes #1006
Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/d09fc766
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/d09fc766
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/d09fc766
Branch: refs/heads/master
Commit: d09fc7663542f04b198ddb2bc3eb51d5786d56fa
Parents: 4ce9475
Author: Enrique Bautista <eb...@gmail.com>
Authored: Mon Aug 10 22:19:39 2015 +0200
Committer: Stephan Ewen <se...@apache.org>
Committed: Tue Aug 11 12:03:23 2015 +0200
----------------------------------------------------------------------
flink-runtime-web/README.md | 10 +++-------
1 file changed, 3 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/flink/blob/d09fc766/flink-runtime-web/README.md
----------------------------------------------------------------------
diff --git a/flink-runtime-web/README.md b/flink-runtime-web/README.md
index 2e8c23e..4eb32a1 100644
--- a/flink-runtime-web/README.md
+++ b/flink-runtime-web/README.md
@@ -60,14 +60,10 @@ The dashboard files are all pre-built, so one can try it out without building it
Depending on your version of Linux or MacOS, you may need to manually install *node.js* and *bower*.
-#### Ubuntu Linux (12.04 and 14.04)
+#### Ubuntu Linux
+
+Install *node.js* by following [these instructions](https://github.com/joyent/node/wiki/installing-node.js-via-package-manager).
-Install *node.js* via
-```
-sudo add-apt-repository ppa:chris-lea/node.js
-sudo apt-get update
-sudo apt-get -y install nodejs
-```
Verify that the installed version is at least *2.11.3*, via `npm -version`.
[4/4] flink git commit: [FLINK-2277] [scala api] Add flag to set
delta iteration solution set to unmanaged
Posted by se...@apache.org.
[FLINK-2277] [scala api] Add flag to set delta iteration solution set to unmanaged
This closes #1005
Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/f50ae26a
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/f50ae26a
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/f50ae26a
Branch: refs/heads/master
Commit: f50ae26a2fb4a0c7f5b390e2f0f5528be9f61730
Parents: b42fbf7
Author: Pieter-Jan Van Aeken <pi...@euranova.eu>
Authored: Mon Aug 10 15:16:08 2015 +0200
Committer: Stephan Ewen <se...@apache.org>
Committed: Tue Aug 11 13:13:47 2015 +0200
----------------------------------------------------------------------
.../org/apache/flink/api/scala/DataSet.scala | 60 ++++++++++++++++++++
1 file changed, 60 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/flink/blob/f50ae26a/flink-scala/src/main/scala/org/apache/flink/api/scala/DataSet.scala
----------------------------------------------------------------------
diff --git a/flink-scala/src/main/scala/org/apache/flink/api/scala/DataSet.scala b/flink-scala/src/main/scala/org/apache/flink/api/scala/DataSet.scala
index 167aa26..207bc5d 100644
--- a/flink-scala/src/main/scala/org/apache/flink/api/scala/DataSet.scala
+++ b/flink-scala/src/main/scala/org/apache/flink/api/scala/DataSet.scala
@@ -1075,6 +1075,36 @@ class DataSet[T: ClassTag](set: JavaDataSet[T]) {
*
* Note: The syntax of delta iterations are very likely going to change soon.
*/
+ def iterateDelta[R: ClassTag](workset: DataSet[R], maxIterations: Int, keyFields: Array[Int],
+ solutionSetUnManaged: Boolean)(
+ stepFunction: (DataSet[T], DataSet[R]) => (DataSet[T], DataSet[R])) = {
+ val key = new ExpressionKeys[T](keyFields, javaSet.getType, false)
+
+ val iterativeSet = new DeltaIteration[T, R](
+ javaSet.getExecutionEnvironment,
+ javaSet.getType,
+ javaSet,
+ workset.javaSet,
+ key,
+ maxIterations)
+
+ iterativeSet.setSolutionSetUnManaged(solutionSetUnManaged)
+
+ val (newSolution, newWorkset) = stepFunction(
+ wrap(iterativeSet.getSolutionSet),
+ wrap(iterativeSet.getWorkset))
+ val result = iterativeSet.closeWith(newSolution.javaSet, newWorkset.javaSet)
+ wrap(result)
+ }
+
+ /**
+ * Creates a new DataSet by performing delta (or workset) iterations using the given step
+ * function. At the beginning `this` DataSet is the solution set and `workset` is the Workset.
+ * The iteration step function gets the current solution set and workset and must output the
+ * delta for the solution set and the workset for the next iteration.
+ *
+ * Note: The syntax of delta iterations are very likely going to change soon.
+ */
def iterateDelta[R: ClassTag](workset: DataSet[R], maxIterations: Int, keyFields: Array[String])(
stepFunction: (DataSet[T], DataSet[R]) => (DataSet[T], DataSet[R])) = {
@@ -1094,6 +1124,36 @@ class DataSet[T: ClassTag](set: JavaDataSet[T]) {
wrap(result)
}
+ /**
+ * Creates a new DataSet by performing delta (or workset) iterations using the given step
+ * function. At the beginning `this` DataSet is the solution set and `workset` is the Workset.
+ * The iteration step function gets the current solution set and workset and must output the
+ * delta for the solution set and the workset for the next iteration.
+ *
+ * Note: The syntax of delta iterations are very likely going to change soon.
+ */
+ def iterateDelta[R: ClassTag](workset: DataSet[R], maxIterations: Int, keyFields: Array[String],
+ solutionSetUnManaged: Boolean)(
+ stepFunction: (DataSet[T], DataSet[R]) => (DataSet[T], DataSet[R])) = {
+
+ val key = new ExpressionKeys[T](keyFields, javaSet.getType)
+ val iterativeSet = new DeltaIteration[T, R](
+ javaSet.getExecutionEnvironment,
+ javaSet.getType,
+ javaSet,
+ workset.javaSet,
+ key,
+ maxIterations)
+
+ iterativeSet.setSolutionSetUnManaged(solutionSetUnManaged)
+
+ val (newSolution, newWorkset) = stepFunction(
+ wrap(iterativeSet.getSolutionSet),
+ wrap(iterativeSet.getWorkset))
+ val result = iterativeSet.closeWith(newSolution.javaSet, newWorkset.javaSet)
+ wrap(result)
+ }
+
// -------------------------------------------------------------------------------------------
// Custom Operators
// -------------------------------------------------------------------------------------------
[2/4] flink git commit: [FLINK-2502] [storm compatibility] Fix
FiniteStormSpout documentation rendering - added missing empty lines -
additional minor improvements
Posted by se...@apache.org.
[FLINK-2502] [storm compatibility] Fix FiniteStormSpout documentation rendering
- added missing empty lines
- additional minor improvements
This closes #1002
Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/5bb855ba
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/5bb855ba
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/5bb855ba
Branch: refs/heads/master
Commit: 5bb855bac7441701495ce47db7ba03ab0e0c6963
Parents: d09fc76
Author: mjsax <mj...@informatik.hu-berlin.de>
Authored: Sun Aug 9 14:22:56 2015 +0200
Committer: Stephan Ewen <se...@apache.org>
Committed: Tue Aug 11 12:06:49 2015 +0200
----------------------------------------------------------------------
docs/apis/storm_compatibility.md | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/flink/blob/5bb855ba/docs/apis/storm_compatibility.md
----------------------------------------------------------------------
diff --git a/docs/apis/storm_compatibility.md b/docs/apis/storm_compatibility.md
index 1390b92..b38667b 100644
--- a/docs/apis/storm_compatibility.md
+++ b/docs/apis/storm_compatibility.md
@@ -171,10 +171,10 @@ See [BoltTokenizerWordCountPojo](https://github.com/apache/flink/tree/master/fli
## Finite Storm Spouts
-In Flink streaming, sources can be finite - i.e. emit a finite number of records and stop after emitting the last record -, however, Storm spouts always emit infinite streams.
+In Flink streaming, sources can be finite – i.e., emit a finite number of records and stop after emitting the last record –, however, Storm spouts always emit infinite streams.
The bridge between the two approach is the `FiniteStormSpout` interface which, in addition to `IRichSpout`, contains a `reachedEnd()` method, where the user can specify a stopping-condition.
The user can create a finite Storm spout by implementing this interface instead of `IRichSpout`, and implementing the `reachedEnd()`method in addition.
-When used as part of a Flink topology, a `FiniteStormSpout` should be wrapped in a `FiniteStormSpoutWrapper` class.
+When used as part of a Flink topology, a `FiniteStormSpout` should be wrapped by `FiniteStormSpoutWrapper`.
Although finite Storm spouts are not necessary to embed Storm spouts into a Flink streaming program or to submit a whole Storm topology to Flink, there are cases where they may come in handy:
@@ -186,6 +186,7 @@ Although finite Storm spouts are not necessary to embed Storm spouts into a Flin
A `FiniteStormSpout` can be still used as a normal, infinite Storm spout by changing its wrapper class to `StormSpoutWraper` in the Flink topology.
An example of a finite Storm spout that emits records for 10 seconds only:
+
<div class="codetabs" markdown="1">
<div data-lang="java" markdown="1">
~~~java
@@ -203,6 +204,7 @@ public class TimedFiniteStormSpout extends AbstractStormSpout implements FiniteS
</div>
Using a `FiniteStormSpout` in a Flink topology:
+
<div class="codetabs" markdown="1">
<div data-lang="java" markdown="1">
~~~java
[3/4] flink git commit: [FLINK-2500] [streaming] Code cleanup in
DataStream
Posted by se...@apache.org.
[FLINK-2500] [streaming] Code cleanup in DataStream
This closes #1001
Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/b42fbf7a
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/b42fbf7a
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/b42fbf7a
Branch: refs/heads/master
Commit: b42fbf7a81c5b57dcf9760825edb175ffd944fb2
Parents: 5bb855b
Author: HuangWHWHW <40...@qq.com>
Authored: Sat Aug 8 14:46:46 2015 +0800
Committer: Stephan Ewen <se...@apache.org>
Committed: Tue Aug 11 12:08:54 2015 +0200
----------------------------------------------------------------------
.../apache/flink/streaming/api/datastream/DataStream.java | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/flink/blob/b42fbf7a/flink-staging/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/datastream/DataStream.java
----------------------------------------------------------------------
diff --git a/flink-staging/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/datastream/DataStream.java b/flink-staging/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/datastream/DataStream.java
index 5bd3fb8..10ed5e3 100644
--- a/flink-staging/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/datastream/DataStream.java
+++ b/flink-staging/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/datastream/DataStream.java
@@ -151,10 +151,10 @@ public class DataStream<OUT> {
this.iterationWaitTime = dataStream.iterationWaitTime;
this.unionedStreams = new ArrayList<DataStream<OUT>>();
this.unionedStreams.add(this);
- if (dataStream.unionedStreams.size() > 1) {
- for (int i = 1; i < dataStream.unionedStreams.size(); i++) {
- this.unionedStreams.add(new DataStream<OUT>(dataStream.unionedStreams.get(i)));
- }
+
+ int size = dataStream.unionedStreams.size();
+ for (int i = 1; i < size; i++) {
+ this.unionedStreams.add(new DataStream<OUT>(dataStream.unionedStreams.get(i)));
}
}