Posted to commits@flink.apache.org by tr...@apache.org on 2019/01/11 07:31:22 UTC
[flink] branch master updated: [FLINK-10509][storm] Remove flink-storm
This is an automated email from the ASF dual-hosted git repository.
trohrmann pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git
The following commit(s) were added to refs/heads/master by this push:
new 2294cdd [FLINK-10509][storm] Remove flink-storm
2294cdd is described below
commit 2294cddac1674fbd760f75429c9e60c571826de9
Author: zentol <ch...@apache.org>
AuthorDate: Thu Jan 10 12:04:16 2019 +0100
[FLINK-10509][storm] Remove flink-storm
This closes #7453.
---
README.md | 2 +-
docs/dev/libs/storm_compatibility.md | 244 -------
docs/redirects/storm_compat.md | 24 -
flink-contrib/flink-storm-examples/README.md | 20 -
flink-contrib/flink-storm-examples/pom.xml | 318 ---------
.../storm/exclamation/ExclamationWithBolt.java | 141 ----
.../storm/exclamation/ExclamationWithSpout.java | 147 -----
.../exclamation/operators/ExclamationBolt.java | 79 ---
.../flink/storm/split/SpoutSplitExample.java | 151 -----
.../flink/storm/split/operators/RandomSpout.java | 80 ---
.../storm/split/operators/VerifyAndEnrichBolt.java | 68 --
.../apache/flink/storm/util/AbstractBoltSink.java | 76 ---
.../apache/flink/storm/util/AbstractLineSpout.java | 70 --
.../org/apache/flink/storm/util/BoltFileSink.java | 76 ---
.../org/apache/flink/storm/util/BoltPrintSink.java | 45 --
.../org/apache/flink/storm/util/FileSpout.java | 88 ---
.../apache/flink/storm/util/FiniteFileSpout.java | 76 ---
.../flink/storm/util/FiniteInMemorySpout.java | 37 --
.../org/apache/flink/storm/util/InMemorySpout.java | 42 --
.../apache/flink/storm/util/OutputFormatter.java | 41 --
.../flink/storm/util/SimpleOutputFormatter.java | 45 --
.../flink/storm/util/TupleOutputFormatter.java | 42 --
.../storm/wordcount/BoltTokenizerWordCount.java | 119 ----
.../wordcount/BoltTokenizerWordCountPojo.java | 132 ----
.../wordcount/BoltTokenizerWordCountWithNames.java | 135 ----
.../storm/wordcount/SpoutSourceWordCount.java | 154 -----
.../storm/wordcount/operators/BoltCounter.java | 90 ---
.../wordcount/operators/BoltCounterByName.java | 90 ---
.../storm/wordcount/operators/BoltTokenizer.java | 78 ---
.../wordcount/operators/BoltTokenizerByName.java | 78 ---
.../wordcount/operators/WordCountDataPojos.java | 65 --
.../wordcount/operators/WordCountDataTuple.java | 37 --
.../wordcount/operators/WordCountFileSpout.java | 39 --
.../operators/WordCountInMemorySpout.java | 40 --
.../flink/storm/wordcount/util/WordCountData.java | 64 --
.../exclamation/ExclamationWithBoltITCase.java | 43 --
.../exclamation/ExclamationWithSpoutITCase.java | 41 --
.../storm/exclamation/util/ExclamationData.java | 101 ---
.../org/apache/flink/storm/split/SplitITCase.java | 56 --
.../wordcount/BoltTokenizerWordCountITCase.java | 41 --
.../BoltTokenizerWordCountPojoITCase.java | 41 --
.../BoltTokenizerWordCountWithNamesITCase.java | 41 --
.../wordcount/SpoutSourceWordCountITCase.java | 41 --
.../src/test/resources/log4j-test.properties | 27 -
flink-contrib/flink-storm/README.md | 16 -
flink-contrib/flink-storm/pom.xml | 193 ------
.../org/apache/flink/storm/util/FiniteSpout.java | 36 --
.../flink/storm/util/NullTerminatingSpout.java | 98 ---
.../apache/flink/storm/util/SplitStreamMapper.java | 40 --
.../apache/flink/storm/util/SplitStreamType.java | 54 --
.../storm/util/SpoutOutputCollectorObserver.java | 89 ---
.../org/apache/flink/storm/util/StormConfig.java | 123 ----
.../flink/storm/util/StormStreamSelector.java | 48 --
.../storm/wrappers/AbstractStormCollector.java | 171 -----
.../apache/flink/storm/wrappers/BoltCollector.java | 94 ---
.../apache/flink/storm/wrappers/BoltWrapper.java | 275 --------
.../flink/storm/wrappers/FlinkTopologyContext.java | 163 -----
.../storm/wrappers/MergedInputsBoltWrapper.java | 127 ----
.../storm/wrappers/SetupOutputFieldsDeclarer.java | 66 --
.../flink/storm/wrappers/SpoutCollector.java | 86 ---
.../apache/flink/storm/wrappers/SpoutWrapper.java | 305 ---------
.../apache/flink/storm/wrappers/StormTuple.java | 398 ------------
.../flink/storm/wrappers/WrapperSetupHelper.java | 161 -----
.../org/apache/flink/storm/util/AbstractTest.java | 42 --
.../apache/flink/storm/util/FiniteTestSpout.java | 80 ---
.../flink/storm/util/NullTerminatingSpoutTest.java | 88 ---
.../util/SpoutOutputCollectorObserverTest.java | 69 --
.../flink/storm/util/StormStreamSelectorTest.java | 53 --
.../org/apache/flink/storm/util/TestDummyBolt.java | 78 ---
.../apache/flink/storm/util/TestDummySpout.java | 86 ---
.../java/org/apache/flink/storm/util/TestSink.java | 64 --
.../flink/storm/wrappers/BoltCollectorTest.java | 148 -----
.../flink/storm/wrappers/BoltWrapperTest.java | 389 -----------
.../storm/wrappers/FlinkTopologyContextTest.java | 113 ----
.../wrappers/SetupOutputFieldsDeclarerTest.java | 94 ---
.../flink/storm/wrappers/SpoutCollectorTest.java | 148 -----
.../flink/storm/wrappers/SpoutWrapperTest.java | 216 -------
.../flink/storm/wrappers/StormTupleTest.java | 710 ---------------------
.../apache/flink/storm/wrappers/TestContext.java | 61 --
.../storm/wrappers/WrapperSetupHelperTest.java | 138 ----
.../src/test/resources/log4j-test.properties | 27 -
flink-contrib/pom.xml | 2 -
tools/travis/stage.sh | 2 -
83 files changed, 1 insertion(+), 8615 deletions(-)
diff --git a/README.md b/README.md
index 64868e1..34bf2c4 100644
--- a/README.md
+++ b/README.md
@@ -27,7 +27,7 @@ Learn more about Flink at [http://flink.apache.org/](http://flink.apache.org/)
* Custom memory management for efficient and robust switching between in-memory and out-of-core data processing algorithms
-* Compatibility layers for Apache Hadoop MapReduce and Apache Storm
+* Compatibility layers for Apache Hadoop MapReduce
* Integration with YARN, HDFS, HBase, and other components of the Apache Hadoop ecosystem
diff --git a/docs/dev/libs/storm_compatibility.md b/docs/dev/libs/storm_compatibility.md
deleted file mode 100644
index 8847c5b..0000000
--- a/docs/dev/libs/storm_compatibility.md
+++ /dev/null
@@ -1,244 +0,0 @@
----
-title: "Storm Compatibility"
-is_beta: true
-nav-parent_id: libs
-nav-pos: 2
----
-<!--
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied. See the License for the
-specific language governing permissions and limitations
-under the License.
--->
-
-[Flink streaming]({{ site.baseurl }}/dev/datastream_api.html) is compatible with Apache Storm interfaces and therefore allows
-reusing code that was implemented for Storm.
-
-You can use Storm `Spout`/`Bolt` as source/operator in Flink streaming programs.
-
-This document shows how to use existing Storm code with Flink.
-
-* This will be replaced by the TOC
-{:toc}
-
-# Project Configuration
-
-Support for Storm is contained in the `flink-storm` Maven module.
-The code resides in the `org.apache.flink.storm` package.
-
-Add the following dependency to your `pom.xml` if you want to execute Storm code in Flink.
-
-{% highlight xml %}
-<dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-storm{{ site.scala_version_suffix }}</artifactId>
- <version>{{site.version}}</version>
-</dependency>
-{% endhighlight %}
-
-**Please note**: Do not add `storm-core` as a dependency. It is already included via `flink-storm`.
-
-**Please note**: `flink-storm` is not part of the provided binary Flink distribution.
-Thus, you need to include `flink-storm` classes (and their dependencies) in your program jar (also called uber-jar or fat-jar) that is submitted to Flink's JobManager.
-See *WordCount Storm* within `flink-storm-examples/pom.xml` for an example of how to package a jar correctly.
-
-If you want to avoid large uber-jars, you can manually copy `storm-core-0.9.4.jar`, `json-simple-1.1.jar` and `flink-storm-{{site.version}}.jar` into Flink's `lib/` folder of each cluster node (*before* the cluster is started).
-For this case, it is sufficient to include only your own Spout and Bolt classes (and their internal dependencies) into the program jar.
-
-# Embed Storm Operators in Flink Streaming Programs
-
-Spouts and Bolts can be embedded into regular streaming programs.
-The Storm compatibility layer offers a wrapper class for each, namely `SpoutWrapper` and `BoltWrapper` (`org.apache.flink.storm.wrappers`).
-
-By default, both wrappers convert Storm output tuples to Flink's [Tuple]({{site.baseurl}}/dev/api_concepts.html#tuples-and-case-classes) types (ie, `Tuple0` to `Tuple25`, according to the number of fields of the Storm tuples).
-For single-field output tuples, a conversion to the field's data type is also possible (eg, `String` instead of `Tuple1<String>`).
-
-Because Flink cannot infer the output field types of Storm operators, it is required to specify the output type manually.
-In order to get the correct `TypeInformation` object, Flink's `TypeExtractor` can be used.
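-
-For example, a minimal sketch (both variants are used in the examples below):
-
-{% highlight java %}
-// derive TypeInformation from a plain class
-TypeInformation<String> stringType = TypeExtractor.getForClass(String.class);
-
-// derive TypeInformation for a generic Tuple type from a sample object
-TypeInformation<Tuple2<String, Integer>> tupleType =
-    TypeExtractor.getForObject(new Tuple2<String, Integer>("", 0));
-{% endhighlight %}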
-
-## Embed Spouts
-
-In order to use a Spout as a Flink source, use `StreamExecutionEnvironment.addSource(SourceFunction, TypeInformation)`.
-The Spout object is handed to the constructor of `SpoutWrapper<OUT>`, which serves as the first argument to `addSource(...)`.
-The generic type declaration `OUT` specifies the type of the source output stream.
-
-<div class="codetabs" markdown="1">
-<div data-lang="java" markdown="1">
-{% highlight java %}
-StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
-
-// stream has `raw` type (single field output streams only)
-DataStream<String> rawInput = env.addSource(
- new SpoutWrapper<String>(new FileSpout(localFilePath), new String[] { Utils.DEFAULT_STREAM_ID }), // emit default output stream as raw type
- TypeExtractor.getForClass(String.class)); // output type
-
-// process data stream
-[...]
-{% endhighlight %}
-</div>
-</div>
-
-If a Spout emits a finite number of tuples, `SpoutWrapper` can be configured to terminate automatically by setting the `numberOfInvocations` parameter in its constructor.
-This allows the Flink program to shut down automatically after all data is processed.
-By default the program will run until it is [canceled]({{site.baseurl}}/ops/cli.html) manually.
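-
-A minimal sketch, assuming the `SpoutWrapper` constructor variant that additionally takes the invocation count (the same pattern is used by `SpoutSplitExample` in `flink-storm-examples`):
-
-{% highlight java %}
-// invoke the Spout's nextTuple() method exactly 1000 times, then shut the source down
-DataStream<String> rawInput = env.addSource(
-    new SpoutWrapper<String>(
-        new FileSpout(localFilePath),
-        new String[] { Utils.DEFAULT_STREAM_ID },
-        1000), // numberOfInvocations
-    TypeExtractor.getForClass(String.class));
-{% endhighlight %}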
-
-
-## Embed Bolts
-
-In order to use a Bolt as a Flink operator, use `DataStream.transform(String, TypeInformation, OneInputStreamOperator)`.
-The Bolt object is handed to the constructor of `BoltWrapper<IN,OUT>`, which serves as the last argument to `transform(...)`.
-The generic type declarations `IN` and `OUT` specify the type of the operator's input and output stream, respectively.
-
-<div class="codetabs" markdown="1">
-<div data-lang="java" markdown="1">
-{% highlight java %}
-StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
-DataStream<String> text = env.readTextFile(localFilePath);
-
-DataStream<Tuple2<String, Integer>> counts = text.transform(
- "tokenizer", // operator name
- TypeExtractor.getForObject(new Tuple2<String, Integer>("", 0)), // output type
- new BoltWrapper<String, Tuple2<String, Integer>>(new BoltTokenizer())); // Bolt operator
-
-// do further processing
-[...]
-{% endhighlight %}
-</div>
-</div>
-
-### Named Attribute Access for Embedded Bolts
-
-Bolts can access input tuple fields by name (in addition to access by index).
-To use this feature with embedded Bolts, you need to have either a
-
- 1. [POJO]({{site.baseurl}}/dev/api_concepts.html#pojos) type input stream or
- 2. [Tuple]({{site.baseurl}}/dev/api_concepts.html#tuples-and-case-classes) type input stream and specify the input schema (i.e. name-to-index-mapping)
-
-For POJO input types, Flink accesses the fields via reflection.
-For this case, Flink expects either a corresponding public member variable or public getter method.
-For example, if a Bolt accesses a field named `sentence` (eg, `String s = input.getStringByField("sentence");`), the input POJO class must have a member variable `public String sentence;` or a method `public String getSentence() { ... };` (pay attention to camel-case naming).
-
-For `Tuple` input types, it is required to specify the input schema using Storm's `Fields` class.
-For this case, the constructor of `BoltWrapper` takes an additional argument: `new BoltWrapper<Tuple1<String>, ...>(..., new Fields("sentence"))`.
-The input type is `Tuple1<String>`, and `Fields("sentence")` specifies that `input.getStringByField("sentence")` is equivalent to `input.getString(0)`.
-
-See [BoltTokenizerWordCountPojo](https://github.com/apache/flink/tree/master/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountPojo.java) and [BoltTokenizerWordCountWithNames](https://github.com/apache/flink/tree/master/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountWithNames.java) for examples.
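-
-A minimal sketch of the `Tuple` input case, assuming a tokenizer Bolt that reads its input via `input.getStringByField("sentence")`:
-
-{% highlight java %}
-DataStream<Tuple1<String>> sentences = ...
-
-DataStream<Tuple2<String, Integer>> counts = sentences.transform(
-    "tokenizer", // operator name
-    TypeExtractor.getForObject(new Tuple2<String, Integer>("", 0)), // output type
-    new BoltWrapper<Tuple1<String>, Tuple2<String, Integer>>(
-        new BoltTokenizerByName(),  // accesses the field "sentence" by name
-        new Fields("sentence")));   // input schema: field 0 is named "sentence"
-{% endhighlight %}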
-
-## Configuring Spouts and Bolts
-
-In Storm, Spouts and Bolts can be configured with a globally distributed `Map` object that is given to the `submitTopology(...)` method of `LocalCluster` or `StormSubmitter`.
-This `Map` is provided by the user alongside the topology and is forwarded as a parameter to the calls `Spout.open(...)` and `Bolt.prepare(...)`.
-
-To replicate this functionality, Flink's configuration mechanism must be used.
-A global configuration can be set in a `StreamExecutionEnvironment` via `.getConfig().setGlobalJobParameters(...)`.
-Flink's regular `Configuration` class can be used to configure Spouts and Bolts.
-However, `Configuration` does not support arbitrary key data types as Storm does (only `String` keys are allowed).
-Thus, Flink additionally provides the `StormConfig` class, which can be used like a raw `Map` to provide full compatibility with Storm.
-
-<div class="codetabs" markdown="1">
-<div data-lang="java" markdown="1">
-{% highlight java %}
-StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
-
-StormConfig config = new StormConfig();
-// set config values
-[...]
-
-// set global Storm configuration
-env.getConfig().setGlobalJobParameters(config);
-
-// assemble program with embedded Spouts and/or Bolts
-[...]
-{% endhighlight %}
-</div>
-</div>
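-
-Inside the Spout or Bolt, the configuration arrives through `open(...)` / `prepare(...)` just as in Storm. A sketch, mirroring the `ExclamationBolt` example from `flink-storm-examples`:
-
-{% highlight java %}
-@SuppressWarnings("rawtypes")
-@Override
-public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
-    this.collector = collector;
-    // read the value that was set via StormConfig / setGlobalJobParameters(...)
-    Integer count = (Integer) conf.get("exclamation.count");
-    this.exclamationNum = (count != null) ? count : 1; // assumed member field
-}
-{% endhighlight %}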
-
-## Multiple Output Streams
-
-Flink can also handle the declaration of multiple output streams for Spouts and Bolts.
-
-The output stream will be of data type `SplitStreamType<T>` and must be split by using `DataStream.split(...)` and `SplitStream.select(...)`.
-Flink already provides the predefined output selector `StormStreamSelector<T>` for `.split(...)`.
-Furthermore, the wrapper type `SplitStreamType<T>` can be removed using `SplitStreamMapper<T>`.
-
-<div class="codetabs" markdown="1">
-<div data-lang="java" markdown="1">
-{% highlight java %}
-[...]
-
-// get DataStream from Spout or Bolt which declares two output streams s1 and s2 with output type SomeType
-DataStream<SplitStreamType<SomeType>> multiStream = ...
-
-SplitStream<SplitStreamType<SomeType>> splitStream = multiStream.split(new StormStreamSelector<SomeType>());
-
-// remove SplitStreamType using SplitStreamMapper to get data stream of type SomeType
-DataStream<SomeType> s1 = splitStream.select("s1").map(new SplitStreamMapper<SomeType>()).returns(SomeType.class);
-DataStream<SomeType> s2 = splitStream.select("s2").map(new SplitStreamMapper<SomeType>()).returns(SomeType.class);
-
-// do further processing on s1 and s2
-[...]
-{% endhighlight %}
-</div>
-</div>
-
-See [SpoutSplitExample.java](https://github.com/apache/flink/tree/master/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/split/SpoutSplitExample.java) for a full example.
-
-# Flink Extensions
-
-## Finite Spouts
-
-In Flink, streaming sources can be finite, ie, emit a finite number of records and stop after emitting the last record. However, Spouts usually emit infinite streams.
-The bridge between the two approaches is the `FiniteSpout` interface which, in addition to `IRichSpout`, contains a `reachedEnd()` method where the user can specify a stopping condition.
-The user can create a finite Spout by implementing this interface instead of (or in addition to) `IRichSpout` and implementing the `reachedEnd()` method.
-In contrast to a `SpoutWrapper` that is configured to emit a finite number of tuples, the `FiniteSpout` interface allows implementing more complex termination criteria.
-
-Although finite Spouts are not necessary to embed Spouts into a Flink streaming program or to submit a whole Storm topology to Flink, there are cases where they may come in handy:
-
- * making a native Spout behave like a finite Flink source with minimal modifications
- * processing a stream only for some time, after which the Spout stops automatically
- * reading a file into a stream
- * testing purposes
-
-An example of a finite Spout that emits records for 10 seconds only:
-
-<div class="codetabs" markdown="1">
-<div data-lang="java" markdown="1">
-{% highlight java %}
-public class TimedFiniteSpout extends BaseRichSpout implements FiniteSpout {
- [...] // implement open(), nextTuple(), ...
-
- private long starttime = System.currentTimeMillis();
-
- public boolean reachedEnd() {
- return System.currentTimeMillis() - starttime > 10000L;
- }
-}
-{% endhighlight %}
-</div>
-</div>
-
-# Storm Compatibility Examples
-
-You can find more examples in the Maven module `flink-storm-examples`.
-For the different versions of WordCount, see [README.md](https://github.com/apache/flink/tree/master/flink-contrib/flink-storm-examples/README.md).
-To run the examples, you need to assemble a correct jar file.
-`flink-storm-examples-{{ site.version }}.jar` is **not** a valid jar file for job execution (it is only a standard Maven artifact).
-
-There are example jars for embedded Spout and Bolt, namely `WordCount-SpoutSource.jar` and `WordCount-BoltTokenizer.jar`, respectively.
-Compare `pom.xml` to see how both jars are built.
-
-You can run each of those examples via `bin/flink run <jarname>.jar`. The correct entry point class is contained in each jar's manifest file.
-
-{% top %}
diff --git a/docs/redirects/storm_compat.md b/docs/redirects/storm_compat.md
deleted file mode 100644
index f2df117..0000000
--- a/docs/redirects/storm_compat.md
+++ /dev/null
@@ -1,24 +0,0 @@
----
-title: "Storm Compatibility"
-layout: redirect
-redirect: /dev/libs/storm_compatibility.html
-permalink: /apis/streaming/storm_compatibility.html
----
-<!--
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied. See the License for the
-specific language governing permissions and limitations
-under the License.
--->
diff --git a/flink-contrib/flink-storm-examples/README.md b/flink-contrib/flink-storm-examples/README.md
deleted file mode 100644
index 00d7906..0000000
--- a/flink-contrib/flink-storm-examples/README.md
+++ /dev/null
@@ -1,20 +0,0 @@
-# flink-storm-examples
-
-This module contains multiple versions of a simple Word-Count example to illustrate the usage of the compatibility layer:
-* the usage of spouts and bolts within a regular Flink streaming program (ie, embedded mode)
- 1. `SpoutSourceWordCount` uses a spout as data source within a Flink streaming program
- 2. `BoltTokenizerWordCount` uses a bolt to split sentences into words within a Flink streaming program
- * `BoltTokenizerWordCountWithNames` uses a `Tuple` input type and accesses attributes by field names (rather than index)
- * `BoltTokenizerWordCountPojo` uses a POJO input type and accesses attributes by field names (rather than index)
-
-* how to submit a whole Storm topology to Flink
- 3. `WordCountTopology` plugs a Storm topology together
- * `StormWordCountLocal` submits the topology to a local Flink cluster (similar to a `LocalCluster` in Storm)
- (`WordCountLocalByName` accesses attributes by field names rather than index)
- * `WordCountRemoteByClient` submits the topology to a remote Flink cluster (similar to the usage of `NimbusClient` in Storm)
- * `WordCountRemoteBySubmitter` submits the topology to a remote Flink cluster (similar to the usage of `StormSubmitter` in Storm)
-
-Additionally, this module packages the three example Word-Count programs as jar files to be submitted to a Flink cluster via `bin/flink run example.jar`.
-(Valid jars are `WordCount-SpoutSource.jar`, `WordCount-BoltTokenizer.jar`, and `WordCount-StormTopology.jar`)
-
-The package `org.apache.flink.storm.wordcount.operators` contains original spouts and bolts that can be used unmodified within Storm or Flink.
diff --git a/flink-contrib/flink-storm-examples/pom.xml b/flink-contrib/flink-storm-examples/pom.xml
deleted file mode 100644
index 6c8bcab..0000000
--- a/flink-contrib/flink-storm-examples/pom.xml
+++ /dev/null
@@ -1,318 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied. See the License for the
-specific language governing permissions and limitations
-under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-contrib</artifactId>
- <version>1.8-SNAPSHOT</version>
- <relativePath>..</relativePath>
- </parent>
-
- <artifactId>flink-storm-examples_${scala.binary.version}</artifactId>
- <name>flink-storm-examples</name>
-
- <packaging>jar</packaging>
-
-
- <repositories>
- <!-- This repository is needed as a stable source for some Clojure libraries -->
- <repository>
- <id>clojars</id>
- <url>https://clojars.org/repo/</url>
- <releases>
- <enabled>true</enabled>
- </releases>
- <snapshots>
- <enabled>false</enabled>
- </snapshots>
- </repository>
- </repositories>
-
-
- <dependencies>
-
- <!-- core dependencies -->
-
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-storm_${scala.binary.version}</artifactId>
- <version>${project.version}</version>
- </dependency>
-
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
- <version>${project.version}</version>
- </dependency>
-
- <dependency>
- <groupId>org.apache.storm</groupId>
- <artifactId>storm-starter</artifactId>
- <version>1.0.0</version>
-
- <!-- remove storm dependency - it should be drawn only (with proper
- customization) via the 'flink-storm' dependency -->
- <exclusions>
- <exclusion>
- <groupId>org.apache.storm</groupId>
- <artifactId>storm-core</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.curator</groupId>
- <artifactId>curator-framework</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
- <!-- test dependencies -->
-
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-shaded-guava</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-test-utils_${scala.binary.version}</artifactId>
- <version>${project.version}</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
- <version>${project.version}</version>
- <scope>test</scope>
- <type>test-jar</type>
- </dependency>
-
- </dependencies>
-
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-deploy-plugin</artifactId>
- <configuration>
- <skip>true</skip>
- </configuration>
- </plugin>
-
- <!-- get default data from flink-example-batch package -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-dependency-plugin</artifactId>
- <version>2.9</version><!--$NO-MVN-MAN-VER$-->
- <executions>
- <execution>
- <id>unpack</id>
- <phase>prepare-package</phase>
- <goals>
- <goal>unpack</goal>
- </goals>
- <configuration>
- <artifactItems>
- <artifactItem>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-storm_${scala.binary.version}</artifactId>
- <version>${project.version}</version>
- <type>jar</type>
- <overWrite>false</overWrite>
- <outputDirectory>${project.build.directory}/classes</outputDirectory>
- </artifactItem>
- <artifactItem>
- <groupId>org.apache.storm</groupId>
- <artifactId>storm-core</artifactId>
- <version>1.0.0</version>
- <type>jar</type>
- <overWrite>false</overWrite>
- <outputDirectory>${project.build.directory}/classes</outputDirectory>
- <!-- need to exclude to be able to run
- * StormWordCountRemoteByClient and
- * StormWordCountRemoteBySubmitter
- within Eclipse -->
- <excludes>defaults.yaml</excludes>
- </artifactItem>
- <artifactItem>
- <groupId>com.googlecode.json-simple</groupId>
- <artifactId>json-simple</artifactId>
- <version>1.1</version>
- <type>jar</type>
- <overWrite>false</overWrite>
- <outputDirectory>${project.build.directory}/classes</outputDirectory>
- </artifactItem>
- <artifactItem>
- <groupId>org.yaml</groupId>
- <artifactId>snakeyaml</artifactId>
- <version>1.11</version>
- <type>jar</type>
- <overWrite>false</overWrite>
- <outputDirectory>${project.build.directory}/classes</outputDirectory>
- </artifactItem>
- </artifactItems>
- </configuration>
- </execution>
- </executions>
- </plugin>
-
- <!-- self-contained jars for each example -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-jar-plugin</artifactId>
-
- <executions>
-
- <!-- WordCount Spout source-->
- <!-- example for embedded spout - for whole topologies see "WordCount Storm topology" example below -->
- <execution>
- <id>WordCount-SpoutSource</id>
- <phase>package</phase>
- <goals>
- <goal>jar</goal>
- </goals>
- <configuration>
- <finalName>WordCount</finalName>
- <classifier>SpoutSource</classifier>
-
- <archive>
- <manifestEntries>
- <program-class>org.apache.flink.storm.wordcount.SpoutSourceWordCount</program-class>
- </manifestEntries>
- </archive>
-
- <includes>
- <!-- from storm-core -->
- <include>org/apache/storm/topology/*.class</include>
- <include>org/apache/storm/spout/*.class</include>
- <include>org/apache/storm/task/*.class</include>
- <include>org/apache/storm/tuple/*.class</include>
- <include>org/apache/storm/generated/*.class</include>
- <include>org/apache/storm/metric/**/*.class</include>
- <include>org/apache/storm/thrift/**/*.class</include>
- <!-- Storm's recursive dependencies -->
- <include>org/json/simple/**/*.class</include>
- <include>org/apache/storm/shade/**/*.class</include>
- <!-- compatibility layer -->
- <include>org/apache/flink/storm/api/*.class</include>
- <include>org/apache/flink/storm/util/*.class</include>
- <include>org/apache/flink/storm/wrappers/*.class</include>
- <!-- Word Count -->
- <include>org/apache/flink/storm/wordcount/SpoutSourceWordCount.class</include>
- <include>org/apache/flink/storm/wordcount/SpoutSourceWordCount$*.class</include>
- <include>org/apache/flink/storm/wordcount/operators/WordCountFileSpout.class</include>
- <include>org/apache/flink/storm/wordcount/operators/WordCountInMemorySpout.class
- </include>
- <include>org/apache/flink/storm/util/AbstractLineSpout.class</include>
- <include>org/apache/flink/storm/util/FileSpout.class</include>
- <include>org/apache/flink/storm/util/InMemorySpout.class</include>
- <include>org/apache/flink/storm/wordcount/util/WordCountData.class</include>
- </includes>
- </configuration>
- </execution>
-
- <!-- WordCount Bolt tokenizer-->
- <!-- example for embedded bolt - for whole topologies see "WordCount Storm topology" example below -->
- <execution>
- <id>WordCount-BoltTokenizer</id>
- <phase>package</phase>
- <goals>
- <goal>jar</goal>
- </goals>
- <configuration>
- <finalName>WordCount</finalName>
- <classifier>BoltTokenizer</classifier>
-
- <archive>
- <manifestEntries>
- <program-class>org.apache.flink.storm.wordcount.BoltTokenizerWordCount
- </program-class>
- </manifestEntries>
- </archive>
-
- <includes>
- <!-- from storm-core -->
- <include>org/apache/storm/topology/*.class</include>
- <include>org/apache/storm/spout/*.class</include>
- <include>org/apache/storm/task/*.class</include>
- <include>org/apache/storm/tuple/*.class</include>
- <include>org/apache/storm/generated/*.class</include>
- <include>org/apache/storm/metric/**/*.class</include>
- <include>org/apache/storm/thrift/**/*.class</include>
- <!-- Storm's recursive dependencies -->
- <include>org/json/simple/**/*.class</include>
- <include>org/apache/storm/shade/**/*.class</include>
- <!-- compatibility layer -->
- <include>org/apache/flink/storm/api/*.class</include>
- <include>org/apache/flink/storm/util/*.class</include>
- <include>org/apache/flink/storm/wrappers/*.class</include>
- <!-- Word Count -->
- <include>org/apache/flink/storm/wordcount/BoltTokenizerWordCount.class</include>
- <include>org/apache/flink/storm/wordcount/operators/BoltTokenizer.class</include>
- <include>org/apache/flink/storm/wordcount/util/WordCountData.class</include>
- </includes>
- </configuration>
- </execution>
-
- <execution>
- <goals>
- <goal>test-jar</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- </plugins>
-
- <pluginManagement>
- <plugins>
- <!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
- <plugin>
- <groupId>org.eclipse.m2e</groupId>
- <artifactId>lifecycle-mapping</artifactId>
- <version>1.0.0</version>
- <configuration>
- <lifecycleMappingMetadata>
- <pluginExecutions>
- <pluginExecution>
- <pluginExecutionFilter>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-dependency-plugin</artifactId>
- <versionRange>[2.9,)</versionRange>
- <goals>
- <goal>unpack</goal>
- </goals>
- </pluginExecutionFilter>
- <action>
- <ignore/>
- </action>
- </pluginExecution>
- </pluginExecutions>
- </lifecycleMappingMetadata>
- </configuration>
- </plugin>
- </plugins>
- </pluginManagement>
-
- </build>
-
-</project>
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/exclamation/ExclamationWithBolt.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/exclamation/ExclamationWithBolt.java
deleted file mode 100644
index b6bb4d5..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/exclamation/ExclamationWithBolt.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.exclamation;
-
-import org.apache.flink.api.common.functions.MapFunction;
-import org.apache.flink.api.java.typeutils.TypeExtractor;
-import org.apache.flink.storm.exclamation.operators.ExclamationBolt;
-import org.apache.flink.storm.util.StormConfig;
-import org.apache.flink.storm.wordcount.util.WordCountData;
-import org.apache.flink.storm.wrappers.BoltWrapper;
-import org.apache.flink.streaming.api.datastream.DataStream;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-
-import org.apache.storm.utils.Utils;
-
-/**
- * Implements the "Exclamation" program that attaches 3+x exclamation marks to every line of a text file in a streaming
- * fashion. The Bolt is embedded into a regular Flink streaming program.
- *
- * <p>The input is a plain text file with lines separated by newline characters.
- *
- * <p>Usage:
- * <code>ExclamationWithBolt <text path> <result path> <number of exclamation marks></code><br>
- * If no parameters are provided, the program is run with default data from {@link WordCountData} with x=2.
- *
- * <p>This example shows how to:
- * <ul>
- * <li>use a Bolt within a Flink Streaming program</li>
- * <li>configure a Bolt using StormConfig</li>
- * </ul>
- */
-public class ExclamationWithBolt {
-
- // *************************************************************************
- // PROGRAM
- // *************************************************************************
-
- public static void main(final String[] args) throws Exception {
-
- if (!parseParameters(args)) {
- return;
- }
-
- // set up the execution environment
- final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
-
- // set Storm configuration
- StormConfig config = new StormConfig();
- config.put(ExclamationBolt.EXCLAMATION_COUNT, new Integer(exclamationNum));
- env.getConfig().setGlobalJobParameters(config);
-
- // get input data
- final DataStream<String> text = getTextDataStream(env);
-
- final DataStream<String> exclaimed = text
- .transform("StormBoltTokenizer",
- TypeExtractor.getForObject(""),
- new BoltWrapper<String, String>(new ExclamationBolt(),
- new String[] { Utils.DEFAULT_STREAM_ID }))
- .map(new ExclamationMap());
-
- // emit result
- if (fileOutput) {
- exclaimed.writeAsText(outputPath);
- } else {
- exclaimed.print();
- }
-
- // execute program
- env.execute("Streaming WordCount with bolt tokenizer");
- }
-
- // *************************************************************************
- // USER FUNCTIONS
- // *************************************************************************
-
- private static class ExclamationMap implements MapFunction<String, String> {
- private static final long serialVersionUID = 4614754344067170619L;
-
- @Override
- public String map(String value) throws Exception {
- return value + "!!!";
- }
- }
-
- // *************************************************************************
- // UTIL METHODS
- // *************************************************************************
-
- private static boolean fileOutput = false;
- private static String textPath;
- private static String outputPath;
- private static int exclamationNum = 2;
-
- private static boolean parseParameters(final String[] args) {
-
- if (args.length > 0) {
- // parse input arguments
- fileOutput = true;
- if (args.length == 3) {
- textPath = args[0];
- outputPath = args[1];
- exclamationNum = Integer.parseInt(args[2]);
- } else {
- System.err.println("Usage: ExclamationWithBolt <text path> <result path> <number of exclamation marks>");
- return false;
- }
- } else {
- System.out.println("Executing ExclamationWithBolt example with built-in default data");
- System.out.println(" Provide parameters to read input data from a file");
- System.out.println(" Usage: ExclamationWithBolt <text path> <result path> <number of exclamation marks>");
- }
- return true;
- }
-
- private static DataStream<String> getTextDataStream(final StreamExecutionEnvironment env) {
- if (fileOutput) {
- // read the text file from given input path
- return env.readTextFile(textPath);
- }
-
- return env.fromElements(WordCountData.WORDS);
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/exclamation/ExclamationWithSpout.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/exclamation/ExclamationWithSpout.java
deleted file mode 100644
index c86ab69..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/exclamation/ExclamationWithSpout.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.exclamation;
-
-import org.apache.flink.api.common.functions.MapFunction;
-import org.apache.flink.api.java.typeutils.TypeExtractor;
-import org.apache.flink.storm.util.FiniteFileSpout;
-import org.apache.flink.storm.util.FiniteInMemorySpout;
-import org.apache.flink.storm.util.StormConfig;
-import org.apache.flink.storm.wordcount.util.WordCountData;
-import org.apache.flink.storm.wrappers.SpoutWrapper;
-import org.apache.flink.streaming.api.datastream.DataStream;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-
-import org.apache.storm.utils.Utils;
-
-/**
- * Implements the "Exclamation" program that attaches six exclamation marks to every line of a text file in a streaming
- * fashion. The Spout is embedded into a regular Flink streaming program.
- *
- * <p>The input is a plain text file with lines separated by newline characters.
- *
- * <p>Usage: <code>ExclamationWithSpout <text path> <result path></code><br>
- * If no parameters are provided, the program is run with default data from {@link WordCountData}.
- *
- * <p>This example shows how to:
- * <ul>
- * <li>use a Storm spout within a Flink Streaming program</li>
- * <li>make use of the FiniteSpout interface</li>
- * <li>configure a Spout using StormConfig</li>
- * </ul>
- */
-public class ExclamationWithSpout {
-
- // *************************************************************************
- // PROGRAM
- // *************************************************************************
-
- public static void main(final String[] args) throws Exception {
-
- if (!parseParameters(args)) {
- return;
- }
-
- // set up the execution environment
- final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
-
- // get input data
- final DataStream<String> text = getTextDataStream(env);
-
- final DataStream<String> exclaimed = text
- .map(new ExclamationMap())
- .map(new ExclamationMap());
-
- // emit result
- if (fileOutput) {
- exclaimed.writeAsText(outputPath);
- } else {
- exclaimed.print();
- }
-
- // execute program
- env.execute("Streaming Exclamation with Storm spout source");
- }
-
- // *************************************************************************
- // USER FUNCTIONS
- // *************************************************************************
-
- private static class ExclamationMap implements MapFunction<String, String> {
- private static final long serialVersionUID = -684993133807698042L;
-
- @Override
- public String map(String value) throws Exception {
- return value + "!!!";
- }
- }
-
- // *************************************************************************
- // UTIL METHODS
- // *************************************************************************
-
- private static boolean fileOutput = false;
- private static String textPath;
- private static String outputPath;
-
- private static boolean parseParameters(final String[] args) {
-
- if (args.length > 0) {
- // parse input arguments
- fileOutput = true;
- if (args.length == 2) {
- textPath = args[0];
- outputPath = args[1];
- } else {
- System.err.println("Usage: ExclamationWithSpout <text path> <result path>");
- return false;
- }
- } else {
- System.out.println("Executing ExclamationWithSpout example with built-in default data");
- System.out.println(" Provide parameters to read input data from a file");
- System.out.println(" Usage: ExclamationWithSpout <text path> <result path>");
- }
- return true;
- }
-
- private static DataStream<String> getTextDataStream(final StreamExecutionEnvironment env) {
- if (fileOutput) {
- final String[] tokens = textPath.split(":");
- final String inputFile = tokens[tokens.length - 1];
-
- // set Storm configuration
- StormConfig config = new StormConfig();
- config.put(FiniteFileSpout.INPUT_FILE_PATH, inputFile);
- env.getConfig().setGlobalJobParameters(config);
-
- return env.addSource(
- new SpoutWrapper<String>(new FiniteFileSpout(),
- new String[] { Utils.DEFAULT_STREAM_ID }),
- TypeExtractor.getForClass(String.class)).setParallelism(1);
- }
-
- return env.addSource(
- new SpoutWrapper<String>(new FiniteInMemorySpout(
- WordCountData.WORDS), new String[] { Utils.DEFAULT_STREAM_ID }),
- TypeExtractor.getForClass(String.class)).setParallelism(1);
-
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/exclamation/operators/ExclamationBolt.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/exclamation/operators/ExclamationBolt.java
deleted file mode 100644
index 8872acd..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/exclamation/operators/ExclamationBolt.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.exclamation.operators;
-
-import org.apache.storm.task.OutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichBolt;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Tuple;
-import org.apache.storm.tuple.Values;
-
-import java.util.Map;
-
-/**
- * A Bolt implementation that appends exclamation marks to incoming tuples. The number of added exclamation marks can
- * be controlled by setting <code>exclamation.count</code>.
- */
-public class ExclamationBolt implements IRichBolt {
- private static final long serialVersionUID = -6364882114201311380L;
-
- public static final String EXCLAMATION_COUNT = "exclamation.count";
-
- private OutputCollector collector;
- private String exclamation;
-
- @SuppressWarnings("rawtypes")
- @Override
- public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
- this.collector = collector;
-
- Object count = conf.get(EXCLAMATION_COUNT);
- if (count != null) {
- int exclamationNum = (Integer) count;
- StringBuilder builder = new StringBuilder();
- for (int index = 0; index < exclamationNum; ++index) {
- builder.append('!');
- }
- this.exclamation = builder.toString();
- } else {
- this.exclamation = "!";
- }
- }
-
- @Override
- public void cleanup() {
- }
-
- @Override
- public void execute(Tuple tuple) {
- collector.emit(tuple, new Values(tuple.getString(0) + this.exclamation));
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("word"));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/split/SpoutSplitExample.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/split/SpoutSplitExample.java
deleted file mode 100644
index c5bb5c3..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/split/SpoutSplitExample.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.split;
-
-import org.apache.flink.api.common.functions.MapFunction;
-import org.apache.flink.api.java.tuple.Tuple2;
-import org.apache.flink.api.java.typeutils.TypeExtractor;
-import org.apache.flink.storm.split.operators.RandomSpout;
-import org.apache.flink.storm.split.operators.VerifyAndEnrichBolt;
-import org.apache.flink.storm.util.SplitStreamMapper;
-import org.apache.flink.storm.util.SplitStreamType;
-import org.apache.flink.storm.util.StormStreamSelector;
-import org.apache.flink.storm.wrappers.BoltWrapper;
-import org.apache.flink.storm.wrappers.SpoutWrapper;
-import org.apache.flink.streaming.api.datastream.DataStream;
-import org.apache.flink.streaming.api.datastream.SplitStream;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-
-/**
- * Implements a simple example with two declared output streams for the embedded spout.
- *
- * <p>This example shows how to:
- * <ul>
- * <li>handle multiple output streams of a spout</li>
- * <li>access each stream via .split(...) and .select(...)</li>
- * <li>strip the wrapper data type SplitStreamType for further processing in Flink</li>
- * </ul>
- *
- * <p>This example would work the same way for multiple bolt output streams.
- */
-public class SpoutSplitExample {
-
- // *************************************************************************
- // PROGRAM
- // *************************************************************************
-
- public static void main(final String[] args) throws Exception {
-
- boolean useFile = SpoutSplitExample.parseParameters(args);
-
- // set up the execution environment
- final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
-
- String[] rawOutputs = new String[] { RandomSpout.EVEN_STREAM, RandomSpout.ODD_STREAM };
-
- final DataStream<SplitStreamType<Integer>> numbers = env.addSource(
- new SpoutWrapper<SplitStreamType<Integer>>(new RandomSpout(true, seed), rawOutputs,
- 1000), TypeExtractor.getForObject(new SplitStreamType<Integer>()));
-
- SplitStream<SplitStreamType<Integer>> splitStream = numbers
- .split(new StormStreamSelector<Integer>());
-
- DataStream<SplitStreamType<Integer>> evenStream = splitStream.select(RandomSpout.EVEN_STREAM);
- DataStream<SplitStreamType<Integer>> oddStream = splitStream.select(RandomSpout.ODD_STREAM);
-
- DataStream<Tuple2<String, Integer>> evenResult = evenStream
- .map(new SplitStreamMapper<Integer>()).returns(Integer.class).map(new Enrich(true));
- DataStream<Tuple2<String, Integer>> oddResult = oddStream.map(
- new SplitStreamMapper<Integer>()).transform("oddBolt",
- TypeExtractor.getForObject(new Tuple2<String, Integer>("", 0)),
- new BoltWrapper<Integer, Tuple2<String, Integer>>(new VerifyAndEnrichBolt(false)));
-
- if (useFile) {
- evenResult.writeAsText(outputPath + "/even");
- oddResult.writeAsText(outputPath + "/odd");
- } else {
- evenResult.print();
- oddResult.print();
- }
-
- // execute program
- env.execute("Spout split stream example");
- }
-
- // *************************************************************************
- // USER FUNCTIONS
- // *************************************************************************
-
- /**
- * Same as {@link VerifyAndEnrichBolt}.
- */
- public static final class Enrich implements MapFunction<Integer, Tuple2<String, Integer>> {
- private static final long serialVersionUID = 5213888269197438892L;
- private final Tuple2<String, Integer> out;
- private final boolean isEven;
-
- public static boolean errorOccured = false;
-
- public Enrich(boolean isEven) {
- this.isEven = isEven;
- if (isEven) {
- this.out = new Tuple2<String, Integer>("even", 0);
- } else {
- this.out = new Tuple2<String, Integer>("odd", 0);
- }
- }
-
- @Override
- public Tuple2<String, Integer> map(Integer value) throws Exception {
- if ((value.intValue() % 2 == 0) != this.isEven) {
- errorOccured = true;
- }
- this.out.setField(value, 1);
- return this.out;
- }
- }
-
- // *************************************************************************
- // UTIL METHODS
- // *************************************************************************
-
- private static long seed = System.currentTimeMillis();
- private static String outputPath = null;
-
- static boolean parseParameters(final String[] args) {
-
- if (args.length > 0) {
- // parse input arguments
- if (args.length == 2) {
- seed = Long.parseLong(args[0]);
- outputPath = args[1];
- return true;
- } else {
- throw new IllegalArgumentException(
- "Usage: SplitStreamBoltLocal <seed> <result path>");
- }
- } else {
- System.out.println("Executing SplitBoltTopology example with random data");
- System.out.println(" Usage: SplitStreamBoltLocal <seed> <result path>");
- }
-
- return false;
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/split/operators/RandomSpout.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/split/operators/RandomSpout.java
deleted file mode 100644
index afec47f..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/split/operators/RandomSpout.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.split.operators;
-
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.topology.base.BaseRichSpout;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-
-import java.util.Map;
-import java.util.Random;
-
-/**
- * A Spout implementation that emits random numbers, optionally splitting them into odd/even streams.
- */
-public class RandomSpout extends BaseRichSpout {
- private static final long serialVersionUID = -3978554318742509334L;
-
- public static final String EVEN_STREAM = "even";
- public static final String ODD_STREAM = "odd";
-
- private final boolean split;
- private Random r = new Random();
- private SpoutOutputCollector collector;
-
- public RandomSpout(boolean split, long seed) {
- this.split = split;
- this.r = new Random(seed);
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
- this.collector = collector;
- }
-
- @Override
- public void nextTuple() {
- int i = r.nextInt();
- if (split) {
- if (i % 2 == 0) {
- this.collector.emit(EVEN_STREAM, new Values(i));
- } else {
- this.collector.emit(ODD_STREAM, new Values(i));
- }
- } else {
- this.collector.emit(new Values(i));
- }
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- Fields schema = new Fields("number");
- if (split) {
- declarer.declareStream(EVEN_STREAM, schema);
- declarer.declareStream(ODD_STREAM, schema);
- } else {
- declarer.declare(schema);
- }
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/split/operators/VerifyAndEnrichBolt.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/split/operators/VerifyAndEnrichBolt.java
deleted file mode 100644
index a39ec9c..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/split/operators/VerifyAndEnrichBolt.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.split.operators;
-
-import org.apache.storm.task.OutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.topology.base.BaseRichBolt;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Tuple;
-import org.apache.storm.tuple.Values;
-
-import java.util.Map;
-
-/**
- * Verifies that incoming numbers are either even or odd, controlled by the constructor argument. Emitted tuples are
- * enriched with a new string field containing either "even" or "odd", based on the number's parity.
- */
-public class VerifyAndEnrichBolt extends BaseRichBolt {
- private static final long serialVersionUID = -7277395570966328721L;
-
- private final boolean evenOrOdd; // true: even -- false: odd
- private final String token;
- private OutputCollector collector;
-
- public static boolean errorOccured = false;
-
- public VerifyAndEnrichBolt(boolean evenOrOdd) {
- this.evenOrOdd = evenOrOdd;
- this.token = evenOrOdd ? "even" : "odd";
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
- this.collector = collector;
- }
-
- @Override
- public void execute(Tuple input) {
- if ((input.getInteger(0) % 2 == 0) != this.evenOrOdd) {
- errorOccured = true;
- }
- this.collector.emit(new Values(this.token, input.getInteger(0)));
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("evenOrOdd", "number"));
- }
-
-}
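
The same verify-and-enrich step can be expressed as a plain MapFunction. A
sketch that fails the record loudly instead of setting a static error flag;
the class name is illustrative:

    import org.apache.flink.api.common.functions.MapFunction;
    import org.apache.flink.api.java.tuple.Tuple2;

    public class VerifyAndEnrich implements MapFunction<Integer, Tuple2<String, Integer>> {
        private static final long serialVersionUID = 1L;

        private final boolean expectEven;
        private final String token;

        public VerifyAndEnrich(boolean expectEven) {
            this.expectEven = expectEven;
            this.token = expectEven ? "even" : "odd";
        }

        @Override
        public Tuple2<String, Integer> map(Integer value) {
            if ((value % 2 == 0) != expectEven) {
                // A static flag does not work across distributed task slots;
                // failing (or using a side output / metric) is more robust.
                throw new IllegalStateException("Unexpected parity: " + value);
            }
            return new Tuple2<>(token, value);
        }
    }
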
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/AbstractBoltSink.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/AbstractBoltSink.java
deleted file mode 100644
index 5ae8cfb..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/AbstractBoltSink.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.storm.task.OutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichBolt;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Tuple;
-
-import java.util.Map;
-
-/**
- * Implements a sink that writes the received data to some external output. The result is formatted like
- * {@code (a1, a2, ..., an)}, using {@code Object.toString()} for each attribute.
- */
-public abstract class AbstractBoltSink implements IRichBolt {
- private static final long serialVersionUID = -1626323806848080430L;
-
- private StringBuilder lineBuilder;
- private String prefix = "";
- private final OutputFormatter formatter;
-
- public AbstractBoltSink(final OutputFormatter formatter) {
- this.formatter = formatter;
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public final void prepare(final Map stormConf, final TopologyContext context,
- final OutputCollector collector) {
- this.prepareSimple(stormConf, context);
- if (context.getComponentCommon(context.getThisComponentId()).get_parallelism_hint() > 1) {
- this.prefix = context.getThisTaskId() + "> ";
- }
- }
-
- protected abstract void prepareSimple(Map<?, ?> stormConf, TopologyContext context);
-
- @Override
- public final void execute(final Tuple input) {
- this.lineBuilder = new StringBuilder();
- this.lineBuilder.append(this.prefix);
- this.lineBuilder.append(this.formatter.format(input));
- this.writeExternal(this.lineBuilder.toString());
- }
-
- protected abstract void writeExternal(String line);
-
- @Override
- public void cleanup() {/* nothing to do */}
-
- @Override
- public final void declareOutputFields(final OutputFieldsDeclarer declarer) {/* nothing to do */}
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-
-}
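
The prefixing logic above (a task id prepended only when the sink runs in
parallel) carries over to a RichSinkFunction almost verbatim; a minimal
sketch, with an illustrative class name:

    import org.apache.flink.configuration.Configuration;
    import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;

    public abstract class AbstractFormattingSink<T> extends RichSinkFunction<T> {
        private static final long serialVersionUID = 1L;

        private transient String prefix;

        @Override
        public void open(Configuration parameters) {
            // Mirror AbstractBoltSink: prefix output only for parallelism > 1.
            prefix = getRuntimeContext().getNumberOfParallelSubtasks() > 1
                    ? getRuntimeContext().getIndexOfThisSubtask() + "> "
                    : "";
        }

        @Override
        public void invoke(T value, Context context) {
            writeExternal(prefix + value.toString());
        }

        protected abstract void writeExternal(String line);
    }
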
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/AbstractLineSpout.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/AbstractLineSpout.java
deleted file mode 100644
index caefd56..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/AbstractLineSpout.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichSpout;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-
-import java.util.Map;
-
-/**
- * Base class for Spouts that read data line by line from an arbitrary source. The declared output schema has a single
- * attribute called {@code line} of type {@link String}.
- */
-public abstract class AbstractLineSpout implements IRichSpout {
- private static final long serialVersionUID = 8876828403487806771L;
-
- public static final String ATTRIBUTE_LINE = "line";
-
- protected SpoutOutputCollector collector;
-
- @SuppressWarnings("rawtypes")
- @Override
- public void open(final Map conf, final TopologyContext context, final SpoutOutputCollector collector) {
- this.collector = collector;
- }
-
- @Override
- public void close() {/* nothing to do */}
-
- @Override
- public void activate() {/* nothing to do */}
-
- @Override
- public void deactivate() {/* nothing to do */}
-
- @Override
- public void ack(final Object msgId) {/* nothing to do */}
-
- @Override
- public void fail(final Object msgId) {/* nothing to do */}
-
- @Override
- public void declareOutputFields(final OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields(ATTRIBUTE_LINE));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/BoltFileSink.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/BoltFileSink.java
deleted file mode 100644
index cbbe191..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/BoltFileSink.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.storm.task.TopologyContext;
-
-import java.io.BufferedWriter;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.util.Map;
-
-/**
- * Implements a sink that writes the received data to the given file (using {@code Object.toString()} for each
- * attribute).
- */
-public final class BoltFileSink extends AbstractBoltSink {
- private static final long serialVersionUID = 2014027288631273666L;
-
- private final String path;
- private BufferedWriter writer;
-
- public BoltFileSink(final String path) {
- this(path, new SimpleOutputFormatter());
- }
-
- public BoltFileSink(final String path, final OutputFormatter formatter) {
- super(formatter);
- this.path = path;
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public void prepareSimple(final Map stormConf, final TopologyContext context) {
- try {
- this.writer = new BufferedWriter(new FileWriter(this.path));
- } catch (final IOException e) {
- throw new RuntimeException(e);
- }
- }
-
- @Override
- public void writeExternal(final String line) {
- try {
- this.writer.write(line + "\n");
- } catch (final IOException e) {
- throw new RuntimeException(e);
- }
- }
-
- @Override
- public void cleanup() {
- if (this.writer != null) {
- try {
- this.writer.close();
- } catch (final IOException e) {
- throw new RuntimeException(e);
- }
- }
- }
-
-}
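
BoltFileSink's job is covered natively by writeAsText; a one-class sketch
(taking the output path from args[0] is purely illustrative):

    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    public class WriteLines {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            env.fromElements("a", "b", "c")
                    // one output file per parallel writer; parallelism 1 gives a single file
                    .writeAsText(args[0])
                    .setParallelism(1);
            env.execute("write lines");
        }
    }
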
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/BoltPrintSink.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/BoltPrintSink.java
deleted file mode 100644
index a80417b..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/BoltPrintSink.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.storm.task.TopologyContext;
-
-import java.util.Map;
-
-/**
- * Implements a sink that prints the received data to {@code stdout}.
- */
-public final class BoltPrintSink extends AbstractBoltSink {
- private static final long serialVersionUID = -6650011223001009519L;
-
- public BoltPrintSink(OutputFormatter formatter) {
- super(formatter);
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public void prepareSimple(final Map stormConf, final TopologyContext context) {
- /* nothing to do */
- }
-
- @Override
- public void writeExternal(final String line) {
- System.out.println(line);
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/FileSpout.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/FileSpout.java
deleted file mode 100644
index 0533b09..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/FileSpout.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.tuple.Values;
-
-import java.io.BufferedReader;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.util.Map;
-
-/**
- * Implements a Spout that reads data from a given local file.
- */
-public class FileSpout extends AbstractLineSpout {
- private static final long serialVersionUID = -6996907090003590436L;
-
- public static final String INPUT_FILE_PATH = "input.path";
-
- protected String path = null;
- protected BufferedReader reader;
-
- public FileSpout() {}
-
- public FileSpout(final String path) {
- this.path = path;
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public void open(final Map conf, final TopologyContext context, final SpoutOutputCollector collector) {
- super.open(conf, context, collector);
-
- Object configuredPath = conf.get(INPUT_FILE_PATH);
- if (configuredPath != null) {
- this.path = (String) configuredPath;
- }
-
- try {
- this.reader = new BufferedReader(new FileReader(this.path));
- } catch (final FileNotFoundException e) {
- throw new RuntimeException(e);
- }
- }
-
- @Override
- public void close() {
- if (this.reader != null) {
- try {
- this.reader.close();
- } catch (final IOException e) {
- throw new RuntimeException(e);
- }
- }
- }
-
- @Override
- public void nextTuple() {
- String line;
- try {
- line = this.reader.readLine();
- if (line != null) {
- this.collector.emit(new Values(line));
- }
- } catch (final IOException e) {
- throw new RuntimeException(e);
- }
- }
-
-}
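
Reading a file line by line needs no spout at all in plain Flink: readTextFile
already yields one String per line, matching the "line" attribute declared by
AbstractLineSpout. A sketch, with the input path taken from args[0] for
illustration:

    import org.apache.flink.streaming.api.datastream.DataStream;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    public class ReadLines {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            DataStream<String> lines = env.readTextFile(args[0]);
            lines.print();
            env.execute("read lines");
        }
    }
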
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/FiniteFileSpout.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/FiniteFileSpout.java
deleted file mode 100644
index e4f39ab..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/FiniteFileSpout.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.tuple.Values;
-
-import java.io.IOException;
-import java.util.Map;
-
-/**
- * Implements a Spout that reads data from a given local file. The spout stops automatically
- * when it reaches the end of the file.
- */
-public class FiniteFileSpout extends FileSpout implements FiniteSpout {
- private static final long serialVersionUID = -1472978008607215864L;
-
- private String line;
- private boolean newLineRead;
-
- public FiniteFileSpout() {}
-
- public FiniteFileSpout(String path) {
- super(path);
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public void open(final Map conf, final TopologyContext context, final SpoutOutputCollector collector) {
- super.open(conf, context, collector);
- newLineRead = false;
- }
-
- @Override
- public void nextTuple() {
- this.collector.emit(new Values(line));
- newLineRead = false;
- }
-
- /**
- * May be called any number of times (including zero) before each call to {@code nextTuple()}.
- */
- @Override
- public boolean reachedEnd() {
- try {
- readLine();
- } catch (IOException e) {
- throw new RuntimeException("Exception occurred while reading file " + path, e);
- }
- return line == null;
- }
-
- private void readLine() throws IOException {
- if (!newLineRead) {
- line = reader.readLine();
- newLineRead = true;
- }
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/FiniteInMemorySpout.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/FiniteInMemorySpout.java
deleted file mode 100644
index ff89a41..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/FiniteInMemorySpout.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-/**
- * Implements a Spout that reads String[] data stored in memory. The Spout stops automatically when it has emitted all
- * of the data.
- */
-public class FiniteInMemorySpout extends InMemorySpout<String> implements FiniteSpout {
- private static final long serialVersionUID = -4008858647468647019L;
-
- public FiniteInMemorySpout(String[] source) {
- super(source);
- }
-
- @Override
- public boolean reachedEnd() {
- return counter >= source.length;
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/InMemorySpout.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/InMemorySpout.java
deleted file mode 100644
index de1ca20..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/InMemorySpout.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.storm.tuple.Values;
-
-/**
- * Implements a Spout that reads data stored in memory.
- */
-public class InMemorySpout<T> extends AbstractLineSpout {
- private static final long serialVersionUID = -4008858647468647019L;
-
- protected T[] source;
- protected int counter = 0;
-
- public InMemorySpout(T[] source) {
- this.source = source;
- }
-
- @Override
- public void nextTuple() {
- if (this.counter < source.length) {
- this.collector.emit(new Values(source[this.counter++]));
- }
- }
-
-}
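
InMemorySpout and FiniteInMemorySpout likewise reduce to a bounded in-memory
source: the job simply finishes once all elements are emitted, which
FiniteSpout#reachedEnd had to emulate. A sketch:

    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    public class InMemorySource {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            // Bounded source: the pipeline terminates after the last element.
            env.fromElements("one", "two", "three").print();
            env.execute("in-memory source");
        }
    }
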
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/OutputFormatter.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/OutputFormatter.java
deleted file mode 100644
index a0f933f..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/OutputFormatter.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.storm.tuple.Tuple;
-
-import java.io.Serializable;
-
-/**
- * Interface that is used to convert Storm {@link Tuple Tuples} to a string before writing them out to a file or to the
- * console.
- */
-public interface OutputFormatter extends Serializable {
-
- /**
- * Converts a Storm {@link Tuple} to a string. This method is used for formatting the output tuples before writing
- * them out to a file or to the console.
- *
- * @param input
- * The tuple to be formatted
- * @return The string result of the formatting
- */
- String format(Tuple input);
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/SimpleOutputFormatter.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/SimpleOutputFormatter.java
deleted file mode 100644
index bf30cd2..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/SimpleOutputFormatter.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.storm.tuple.Tuple;
-
-/**
- * Simple {@link OutputFormatter} implementation that converts {@link Tuple Tuples} of size 1 to a string by returning
- * the result of {@link Object#toString()} for the single field.
- */
-public class SimpleOutputFormatter implements OutputFormatter {
- private static final long serialVersionUID = 6349573860144270338L;
-
- /**
- * Converts a Storm {@link Tuple} with 1 field to a string by retrieving the value of that field. This method is
- * used for formatting raw outputs wrapped in tuples, before writing them out to a file or to the console.
- *
- * @param input
- * The tuple to be formatted
- * @return The string result of the formatting
- */
- @Override
- public String format(final Tuple input) {
- if (input.getValues().size() != 1) {
- throw new RuntimeException("The output is not raw");
- }
- return input.getValue(0).toString();
- }
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/TupleOutputFormatter.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/TupleOutputFormatter.java
deleted file mode 100644
index 42189a7..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/util/TupleOutputFormatter.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.storm.tuple.Tuple;
-
-/**
- * {@link OutputFormatter} implementation that converts {@link Tuple Tuples} of arbitrary size to a string. For a given
- * tuple the output is <code>(field1,field2,...,fieldX)</code>.
- */
-public class TupleOutputFormatter implements OutputFormatter {
- private static final long serialVersionUID = -599665757723851761L;
-
- @Override
- public String format(final Tuple input) {
- final StringBuilder stringBuilder = new StringBuilder();
- stringBuilder.append("(");
- for (final Object attribute : input.getValues()) {
- stringBuilder.append(attribute);
- stringBuilder.append(",");
- }
- stringBuilder.replace(stringBuilder.length() - 1, stringBuilder.length(), ")");
- return stringBuilder.toString();
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCount.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCount.java
deleted file mode 100644
index 54a6080..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCount.java
+++ /dev/null
@@ -1,119 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wordcount;
-
-import org.apache.flink.api.java.tuple.Tuple2;
-import org.apache.flink.api.java.typeutils.TypeExtractor;
-import org.apache.flink.storm.wordcount.operators.BoltTokenizer;
-import org.apache.flink.storm.wordcount.util.WordCountData;
-import org.apache.flink.storm.wrappers.BoltWrapper;
-import org.apache.flink.streaming.api.datastream.DataStream;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-
-import org.apache.storm.topology.IRichBolt;
-
-/**
- * Implements the "WordCount" program that computes a simple word occurrence histogram over text files in a streaming
- * fashion. The tokenizer step is performed by a {@link IRichBolt Bolt}.
- *
- * <p>The input is a plain text file with lines separated by newline characters.
- *
- * <p>Usage: <code>WordCount &lt;text path&gt; &lt;result path&gt;</code><br>
- * If no parameters are provided, the program is run with default data from {@link WordCountData}.
- *
- * <p>This example shows how to:
- * <ul>
- * <li>use a Bolt within a Flink Streaming program.</li>
- * </ul>
- */
-public class BoltTokenizerWordCount {
-
- // *************************************************************************
- // PROGRAM
- // *************************************************************************
-
- public static void main(final String[] args) throws Exception {
-
- if (!parseParameters(args)) {
- return;
- }
-
- // set up the execution environment
- final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
-
- // get input data
- final DataStream<String> text = getTextDataStream(env);
-
- final DataStream<Tuple2<String, Integer>> counts = text
- // split up the lines into pairs (2-tuples) containing: (word,1)
- // this is done by a bolt that is wrapped accordingly
- .transform("BoltTokenizer",
- TypeExtractor.getForObject(new Tuple2<String, Integer>("", 0)),
- new BoltWrapper<String, Tuple2<String, Integer>>(new BoltTokenizer()))
- // group by the tuple field "0" and sum up tuple field "1"
- .keyBy(0).sum(1);
-
- // emit result
- if (fileOutput) {
- counts.writeAsText(outputPath);
- } else {
- counts.print();
- }
-
- // execute program
- env.execute("Streaming WordCount with bolt tokenizer");
- }
-
- // *************************************************************************
- // UTIL METHODS
- // *************************************************************************
-
- private static boolean fileOutput = false;
- private static String textPath;
- private static String outputPath;
-
- private static boolean parseParameters(final String[] args) {
-
- if (args.length > 0) {
- // parse input arguments
- fileOutput = true;
- if (args.length == 2) {
- textPath = args[0];
- outputPath = args[1];
- } else {
- System.err.println("Usage: BoltTokenizerWordCount <text path> <result path>");
- return false;
- }
- } else {
- System.out.println("Executing BoltTokenizerWordCount example with built-in default data");
- System.out.println(" Provide parameters to read input data from a file");
- System.out.println(" Usage: BoltTokenizerWordCount <text path> <result path>");
- }
- return true;
- }
-
- private static DataStream<String> getTextDataStream(final StreamExecutionEnvironment env) {
- if (fileOutput) {
- // read the text file from given input path
- return env.readTextFile(textPath);
- }
-
- return env.fromElements(WordCountData.WORDS);
- }
-
-}
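
With BoltWrapper gone, the pipeline above collapses into a native flatMap plus
keyBy/sum; a self-contained sketch using the same tokenization rules (the
class name is illustrative):

    import org.apache.flink.api.common.functions.FlatMapFunction;
    import org.apache.flink.api.java.tuple.Tuple2;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
    import org.apache.flink.util.Collector;

    public class NativeWordCount {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            env.fromElements("to be or not to be")
                    .flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
                        @Override
                        public void flatMap(String line, Collector<Tuple2<String, Integer>> out) {
                            // normalize, split, and emit (word, 1) pairs
                            for (String token : line.toLowerCase().split("\\W+")) {
                                if (!token.isEmpty()) {
                                    out.collect(new Tuple2<>(token, 1));
                                }
                            }
                        }
                    })
                    .keyBy(0)
                    .sum(1)
                    .print();
            env.execute("native word count");
        }
    }
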
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountPojo.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountPojo.java
deleted file mode 100644
index 06c6a03..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountPojo.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wordcount;
-
-import org.apache.flink.api.java.io.CsvInputFormat;
-import org.apache.flink.api.java.io.PojoCsvInputFormat;
-import org.apache.flink.api.java.tuple.Tuple2;
-import org.apache.flink.api.java.typeutils.PojoTypeInfo;
-import org.apache.flink.api.java.typeutils.TypeExtractor;
-import org.apache.flink.core.fs.Path;
-import org.apache.flink.storm.wordcount.operators.BoltTokenizerByName;
-import org.apache.flink.storm.wordcount.operators.WordCountDataPojos;
-import org.apache.flink.storm.wordcount.operators.WordCountDataPojos.Sentence;
-import org.apache.flink.storm.wordcount.util.WordCountData;
-import org.apache.flink.storm.wrappers.BoltWrapper;
-import org.apache.flink.streaming.api.datastream.DataStream;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-
-import org.apache.storm.topology.IRichBolt;
-
-/**
- * Implements the "WordCount" program that computes a simple word occurrence histogram over text files in a streaming
- * fashion. The tokenizer step is performed by a {@link IRichBolt Bolt}. In contrast to {@link BoltTokenizerWordCount}
- * the tokenizer's input is a POJO type and the single field is accessed by name.
- *
- * <p>The input is a plain text file with lines separated by newline characters.
- *
- * <p>Usage: <code>WordCount &lt;text path&gt; &lt;result path&gt;</code><br>
- * If no parameters are provided, the program is run with default data from {@link WordCountData}.
- *
- * <p>This example shows how to:
- * <ul>
- * <li>access attributes by name within a Bolt for POJO type input streams</li>
- * </ul>
- */
-public class BoltTokenizerWordCountPojo {
-
- // *************************************************************************
- // PROGRAM
- // *************************************************************************
-
- public static void main(final String[] args) throws Exception {
-
- if (!parseParameters(args)) {
- return;
- }
-
- // set up the execution environment
- final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
-
- // get input data
- final DataStream<Sentence> text = getTextDataStream(env);
-
- final DataStream<Tuple2<String, Integer>> counts = text
- // split up the lines into pairs (2-tuples) containing: (word,1)
- // this is done by a bolt that is wrapped accordingly
- .transform("BoltTokenizerPojo",
- TypeExtractor.getForObject(new Tuple2<String, Integer>("", 0)),
- new BoltWrapper<Sentence, Tuple2<String, Integer>>(new BoltTokenizerByName()))
- // group by the tuple field "0" and sum up tuple field "1"
- .keyBy(0).sum(1);
-
- // emit result
- if (fileOutput) {
- counts.writeAsText(outputPath);
- } else {
- counts.print();
- }
-
- // execute program
- env.execute("Streaming WordCount with POJO bolt tokenizer");
- }
-
- // *************************************************************************
- // UTIL METHODS
- // *************************************************************************
-
- private static boolean fileOutput = false;
- private static String textPath;
- private static String outputPath;
-
- private static boolean parseParameters(final String[] args) {
-
- if (args.length > 0) {
- // parse input arguments
- fileOutput = true;
- if (args.length == 2) {
- textPath = args[0];
- outputPath = args[1];
- } else {
- System.err.println("Usage: BoltTokenizerWordCountPojo <text path> <result path>");
- return false;
- }
- } else {
- System.out
- .println("Executing BoltTokenizerWordCountPojo example with built-in default data");
- System.out.println(" Provide parameters to read input data from a file");
- System.out.println(" Usage: BoltTokenizerWordCountPojo <text path> <result path>");
- }
- return true;
- }
-
- private static DataStream<Sentence> getTextDataStream(final StreamExecutionEnvironment env) {
- if (fileOutput) {
- // read the text file from given input path
- PojoTypeInfo<Sentence> sourceType = (PojoTypeInfo<Sentence>) TypeExtractor
- .getForObject(new Sentence(""));
- return env.createInput(new PojoCsvInputFormat<Sentence>(new Path(
- textPath), CsvInputFormat.DEFAULT_LINE_DELIMITER,
- CsvInputFormat.DEFAULT_LINE_DELIMITER, sourceType),
- sourceType);
- }
-
- return env.fromElements(WordCountDataPojos.SENTENCES);
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountWithNames.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountWithNames.java
deleted file mode 100644
index b0bb85a..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountWithNames.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wordcount;
-
-import org.apache.flink.api.java.io.CsvInputFormat;
-import org.apache.flink.api.java.io.TupleCsvInputFormat;
-import org.apache.flink.api.java.tuple.Tuple;
-import org.apache.flink.api.java.tuple.Tuple1;
-import org.apache.flink.api.java.tuple.Tuple2;
-import org.apache.flink.api.java.typeutils.TupleTypeInfo;
-import org.apache.flink.api.java.typeutils.TypeExtractor;
-import org.apache.flink.core.fs.Path;
-import org.apache.flink.storm.wordcount.operators.BoltTokenizerByName;
-import org.apache.flink.storm.wordcount.operators.WordCountDataTuple;
-import org.apache.flink.storm.wordcount.util.WordCountData;
-import org.apache.flink.storm.wrappers.BoltWrapper;
-import org.apache.flink.streaming.api.datastream.DataStream;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-
-import org.apache.storm.topology.IRichBolt;
-import org.apache.storm.tuple.Fields;
-
-/**
- * Implements the "WordCount" program that computes a simple word occurrence histogram over text files in a streaming
- * fashion. The tokenizer step is performed by a {@link IRichBolt Bolt}. In contrast to {@link BoltTokenizerWordCount}
- * the tokenizer's input is a {@link Tuple} type and the single field is accessed by name.
- *
- * <p>The input is a plain text file with lines separated by newline characters.
- *
- * <p>Usage: <code>WordCount &lt;text path&gt; &lt;result path&gt;</code><br>
- * If no parameters are provided, the program is run with default data from {@link WordCountData}.
- *
- * <p>This example shows how to:
- * <ul>
- * <li>access attributes by name within a Bolt for {@link Tuple} type input streams</li>
- * </ul>
- */
-public class BoltTokenizerWordCountWithNames {
-
- // *************************************************************************
- // PROGRAM
- // *************************************************************************
-
- public static void main(final String[] args) throws Exception {
-
- if (!parseParameters(args)) {
- return;
- }
-
- // set up the execution environment
- final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
-
- // get input data
- final DataStream<Tuple1<String>> text = getTextDataStream(env);
-
- final DataStream<Tuple2<String, Integer>> counts = text
- // split up the lines into pairs (2-tuples) containing: (word,1)
- // this is done by a Storm bolt that is wrapped accordingly
- .transform(
- "BoltTokenizerWithNames",
- TypeExtractor.getForObject(new Tuple2<String, Integer>("", 0)),
- new BoltWrapper<Tuple1<String>, Tuple2<String, Integer>>(
- new BoltTokenizerByName(), new Fields("sentence")))
- // group by the tuple field "0" and sum up tuple field "1"
- .keyBy(0).sum(1);
-
- // emit result
- if (fileOutput) {
- counts.writeAsText(outputPath);
- } else {
- counts.print();
- }
-
- // execute program
- env.execute("Streaming WordCount with schema bolt tokenizer");
- }
-
- // *************************************************************************
- // UTIL METHODS
- // *************************************************************************
-
- private static boolean fileOutput = false;
- private static String textPath;
- private static String outputPath;
-
- private static boolean parseParameters(final String[] args) {
-
- if (args.length > 0) {
- // parse input arguments
- fileOutput = true;
- if (args.length == 2) {
- textPath = args[0];
- outputPath = args[1];
- } else {
- System.err.println("Usage: BoltTokenizerWordCountWithNames <text path> <result path>");
- return false;
- }
- } else {
- System.out.println("Executing BoltTokenizerWordCountWithNames example with built-in default data");
- System.out.println(" Provide parameters to read input data from a file");
- System.out.println(" Usage: BoltTokenizerWordCountWithNames <text path> <result path>");
- }
- return true;
- }
-
- private static DataStream<Tuple1<String>> getTextDataStream(final StreamExecutionEnvironment env) {
- if (fileOutput) {
- // read the text file from given input path
- TupleTypeInfo<Tuple1<String>> sourceType = (TupleTypeInfo<Tuple1<String>>) TypeExtractor
- .getForObject(new Tuple1<String>(""));
- return env.createInput(new TupleCsvInputFormat<Tuple1<String>>(new Path(
- textPath), CsvInputFormat.DEFAULT_LINE_DELIMITER,
- CsvInputFormat.DEFAULT_LINE_DELIMITER, sourceType),
- sourceType);
- }
-
- return env.fromElements(WordCountDataTuple.TUPLES);
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/SpoutSourceWordCount.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/SpoutSourceWordCount.java
deleted file mode 100644
index 7109c4a..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/SpoutSourceWordCount.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wordcount;
-
-import org.apache.flink.api.common.functions.FlatMapFunction;
-import org.apache.flink.api.java.tuple.Tuple2;
-import org.apache.flink.api.java.typeutils.TypeExtractor;
-import org.apache.flink.storm.wordcount.operators.WordCountFileSpout;
-import org.apache.flink.storm.wordcount.operators.WordCountInMemorySpout;
-import org.apache.flink.storm.wordcount.util.WordCountData;
-import org.apache.flink.storm.wrappers.SpoutWrapper;
-import org.apache.flink.streaming.api.datastream.DataStream;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-import org.apache.flink.util.Collector;
-
-import org.apache.storm.topology.IRichSpout;
-import org.apache.storm.utils.Utils;
-
-/**
- * Implements the "WordCount" program that computes a simple word occurrence histogram over text files in a streaming
- * fashion. The used data source is a {@link IRichSpout Spout}.
- *
- * <p>The input is a plain text file with lines separated by newline characters.
- *
- * <p>Usage: <code>WordCount &lt;text path&gt; &lt;result path&gt;</code><br>
- * If no parameters are provided, the program is run with default data from {@link WordCountData}.
- *
- * <p>This example shows how to:
- * <ul>
- * <li>use a Spout within a Flink Streaming program.</li>
- * </ul>
- */
-public class SpoutSourceWordCount {
-
- // *************************************************************************
- // PROGRAM
- // *************************************************************************
-
- public static void main(final String[] args) throws Exception {
-
- if (!parseParameters(args)) {
- return;
- }
-
- // set up the execution environment
- final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
-
- // get input data
- final DataStream<String> text = getTextDataStream(env);
-
- final DataStream<Tuple2<String, Integer>> counts =
- // split up the lines into pairs (2-tuples) containing: (word,1)
- text.flatMap(new Tokenizer())
- // group by the tuple field "0" and sum up tuple field "1"
- .keyBy(0).sum(1);
-
- // emit result
- if (fileOutput) {
- counts.writeAsText(outputPath);
- } else {
- counts.print();
- }
-
- // execute program
- env.execute("Streaming WordCount with spout source");
- }
-
- // *************************************************************************
- // USER FUNCTIONS
- // *************************************************************************
-
- /**
- * Implements the string tokenizer that splits sentences into words as a user-defined FlatMapFunction. The function
- * takes a line (String) and splits it into multiple pairs in the form of "(word,1)" ({@code Tuple2<String, Integer>}).
- */
- public static final class Tokenizer implements FlatMapFunction<String, Tuple2<String, Integer>> {
- private static final long serialVersionUID = 1L;
-
- @Override
- public void flatMap(final String value, final Collector<Tuple2<String, Integer>> out) throws Exception {
- // normalize and split the line
- final String[] tokens = value.toLowerCase().split("\\W+");
-
- // emit the pairs
- for (final String token : tokens) {
- if (token.length() > 0) {
- out.collect(new Tuple2<String, Integer>(token, 1));
- }
- }
- }
- }
-
- // *************************************************************************
- // UTIL METHODS
- // *************************************************************************
-
- private static boolean fileOutput = false;
- private static String textPath;
- private static String outputPath;
-
- private static boolean parseParameters(final String[] args) {
-
- if (args.length > 0) {
- // parse input arguments
- fileOutput = true;
- if (args.length == 2) {
- textPath = args[0];
- outputPath = args[1];
- } else {
- System.err.println("Usage: SpoutSourceWordCount <text path> <result path>");
- return false;
- }
- } else {
- System.out.println("Executing SpoutSourceWordCount example with built-in default data");
- System.out.println(" Provide parameters to read input data from a file");
- System.out.println(" Usage: SpoutSourceWordCount <text path> <result path>");
- }
- return true;
- }
-
- private static DataStream<String> getTextDataStream(final StreamExecutionEnvironment env) {
- if (fileOutput) {
- // read the text file from given input path
- final String[] tokens = textPath.split(":");
- final String localFile = tokens[tokens.length - 1];
- return env.addSource(
- new SpoutWrapper<String>(new WordCountFileSpout(localFile),
- new String[] { Utils.DEFAULT_STREAM_ID }, -1),
- TypeExtractor.getForClass(String.class)).setParallelism(1);
- }
-
- return env.addSource(
- new SpoutWrapper<String>(new WordCountInMemorySpout(),
- new String[] { Utils.DEFAULT_STREAM_ID }, -1),
- TypeExtractor.getForClass(String.class)).setParallelism(1);
-
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/BoltCounter.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/BoltCounter.java
deleted file mode 100644
index 34fc703..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/BoltCounter.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wordcount.operators;
-
-import org.apache.storm.task.OutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichBolt;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Tuple;
-import org.apache.storm.tuple.Values;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Implements the word counter that counts the occurrence of each unique word. The bolt takes a pair (input tuple
- * schema: {@code <String,Integer>}) and sums the given word count for each unique word (output tuple schema:
- * {@code <String,Integer>} ).
- *
- * <p>Same as {@link BoltCounterByName}, but accesses input attribute by index (instead of name).
- */
-public class BoltCounter implements IRichBolt {
- private static final long serialVersionUID = 399619605462625934L;
-
- public static final String ATTRIBUTE_WORD = "word";
- public static final String ATTRIBUTE_COUNT = "count";
-
- private final HashMap<String, Count> counts = new HashMap<String, Count>();
- private OutputCollector collector;
-
- @SuppressWarnings("rawtypes")
- @Override
- public void prepare(final Map stormConf, final TopologyContext context, final OutputCollector collector) {
- this.collector = collector;
- }
-
- @Override
- public void execute(final Tuple input) {
- final String word = input.getString(BoltTokenizer.ATTRIBUTE_WORD_INDEX);
-
- Count currentCount = this.counts.get(word);
- if (currentCount == null) {
- currentCount = new Count();
- this.counts.put(word, currentCount);
- }
- currentCount.count += input.getInteger(BoltTokenizer.ATTRIBUTE_COUNT_INDEX);
-
- this.collector.emit(new Values(word, currentCount.count));
- }
-
- @Override
- public void cleanup() {/* nothing to do */}
-
- @Override
- public void declareOutputFields(final OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields(ATTRIBUTE_WORD, ATTRIBUTE_COUNT));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-
- /**
- * A counter helper used to emit immutable tuples to the given collector while avoiding unnecessary object
- * creation/deletion.
- */
- private static final class Count {
- public int count;
-
- public Count() {/* nothing to do */}
- }
-
-}
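
BoltCounter's in-memory HashMap is neither fault tolerant nor partition-aware;
the idiomatic counterpart keeps the running count in Flink keyed state. A
sketch, assuming the input stream is keyed by the word field (names are
illustrative):

    import org.apache.flink.api.common.functions.RichFlatMapFunction;
    import org.apache.flink.api.common.state.ValueState;
    import org.apache.flink.api.common.state.ValueStateDescriptor;
    import org.apache.flink.api.java.tuple.Tuple2;
    import org.apache.flink.configuration.Configuration;
    import org.apache.flink.util.Collector;

    public class CountPerWord
            extends RichFlatMapFunction<Tuple2<String, Integer>, Tuple2<String, Integer>> {
        private static final long serialVersionUID = 1L;

        private transient ValueState<Integer> count;

        @Override
        public void open(Configuration parameters) {
            count = getRuntimeContext().getState(
                    new ValueStateDescriptor<>("count", Integer.class));
        }

        @Override
        public void flatMap(Tuple2<String, Integer> in, Collector<Tuple2<String, Integer>> out)
                throws Exception {
            Integer current = count.value(); // null the first time a key is seen
            int updated = (current == null ? 0 : current) + in.f1;
            count.update(updated);
            out.collect(new Tuple2<>(in.f0, updated));
        }
    }

    // usage: pairs.keyBy(0).flatMap(new CountPerWord())
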
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/BoltCounterByName.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/BoltCounterByName.java
deleted file mode 100644
index cd53d50..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/BoltCounterByName.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wordcount.operators;
-
-import org.apache.storm.task.OutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichBolt;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Tuple;
-import org.apache.storm.tuple.Values;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Implements the word counter that counts the occurrence of each unique word. The bolt takes a pair (input tuple
- * schema: {@code <String,Integer>}) and sums the given word count for each unique word (output tuple schema:
- * {@code <String,Integer>} ).
- *
- * <p>Same as {@link BoltCounter}, but accesses input attribute by name (instead of index).
- */
-public class BoltCounterByName implements IRichBolt {
- private static final long serialVersionUID = 399619605462625934L;
-
- public static final String ATTRIBUTE_WORD = "word";
- public static final String ATTRIBUTE_COUNT = "count";
-
- private final HashMap<String, Count> counts = new HashMap<String, Count>();
- private OutputCollector collector;
-
- @SuppressWarnings("rawtypes")
- @Override
- public void prepare(final Map stormConf, final TopologyContext context, final OutputCollector collector) {
- this.collector = collector;
- }
-
- @Override
- public void execute(final Tuple input) {
- final String word = input.getStringByField(BoltTokenizer.ATTRIBUTE_WORD);
-
- Count currentCount = this.counts.get(word);
- if (currentCount == null) {
- currentCount = new Count();
- this.counts.put(word, currentCount);
- }
- currentCount.count += input.getIntegerByField(BoltTokenizer.ATTRIBUTE_COUNT);
-
- this.collector.emit(new Values(word, currentCount.count));
- }
-
- @Override
- public void cleanup() {/* nothing to do */}
-
- @Override
- public void declareOutputFields(final OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields(ATTRIBUTE_WORD, ATTRIBUTE_COUNT));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-
- /**
- * A counter helper used to emit immutable tuples to the given collector while avoiding unnecessary object
- * creation/deletion.
- */
- private static final class Count {
- public int count;
-
- public Count() {/* nothing to do */}
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/BoltTokenizer.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/BoltTokenizer.java
deleted file mode 100644
index 41e8a8d..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/BoltTokenizer.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wordcount.operators;
-
-import org.apache.storm.task.OutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichBolt;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Tuple;
-import org.apache.storm.tuple.Values;
-
-import java.util.Map;
-
-/**
- * Implements the string tokenizer that splits sentences into words as a bolt. The bolt takes a line (input tuple
- * schema: {@code <String>}) and splits it into multiple pairs in the form of "(word,1)" (output tuple schema:
- * {@code <String,Integer>}).
- *
- * <p>Same as {@link BoltTokenizerByName}, but accesses input attribute by index (instead of name).
- */
-public final class BoltTokenizer implements IRichBolt {
- private static final long serialVersionUID = -8589620297208175149L;
-
- public static final String ATTRIBUTE_WORD = "word";
- public static final String ATTRIBUTE_COUNT = "count";
-
- public static final int ATTRIBUTE_WORD_INDEX = 0;
- public static final int ATTRIBUTE_COUNT_INDEX = 1;
-
- private OutputCollector collector;
-
- @SuppressWarnings("rawtypes")
- @Override
- public void prepare(final Map stormConf, final TopologyContext context, final OutputCollector collector) {
- this.collector = collector;
- }
-
- @Override
- public void execute(final Tuple input) {
- final String[] tokens = input.getString(0).toLowerCase().split("\\W+");
-
- for (final String token : tokens) {
- if (token.length() > 0) {
- this.collector.emit(new Values(token, 1));
- }
- }
- }
-
- @Override
- public void cleanup() {/* nothing to do */}
-
- @Override
- public void declareOutputFields(final OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields(ATTRIBUTE_WORD, ATTRIBUTE_COUNT));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-
-}
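Before its removal, a tokenizer bolt like this was embedded into a Flink job via the module's BoltWrapper. A hedged sketch along the lines of the removed BoltTokenizerWordCount example (the wrapper class and its generic signature come from the sources deleted in this commit):

    // Sketch only: BoltWrapper adapts an unmodified IRichBolt to a Flink
    // stream operator; env and textPath are assumed to be set up elsewhere.
    DataStream<String> text = env.readTextFile(textPath);
    DataStream<Tuple2<String, Integer>> counts = text
            .transform("bolt-tokenizer",
                    TypeExtractor.getForObject(new Tuple2<String, Integer>("", 0)),
                    new BoltWrapper<String, Tuple2<String, Integer>>(new BoltTokenizer()))
            .keyBy(0)
            .sum(1);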
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/BoltTokenizerByName.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/BoltTokenizerByName.java
deleted file mode 100644
index dff39eb..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/BoltTokenizerByName.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wordcount.operators;
-
-import org.apache.storm.task.OutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichBolt;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Tuple;
-import org.apache.storm.tuple.Values;
-
-import java.util.Map;
-
-/**
- * Implements the string tokenizer that splits sentences into words as a bolt. The bolt takes a line (input tuple
- * schema: {@code <String>}) and splits it into multiple pairs in the form of "(word,1)" (output tuple schema:
- * {@code <String,Integer>}).
- *
- * <p>Same as {@link BoltTokenizer}, but accesses the input attribute by name (instead of by index).
- */
-public final class BoltTokenizerByName implements IRichBolt {
- private static final long serialVersionUID = -8589620297208175149L;
-
- public static final String ATTRIBUTE_WORD = "word";
- public static final String ATTRIBUTE_COUNT = "count";
-
- public static final int ATTRIBUTE_WORD_INDEX = 0;
- public static final int ATTRIBUTE_COUNT_INDEX = 1;
-
- private OutputCollector collector;
-
- @SuppressWarnings("rawtypes")
- @Override
- public void prepare(final Map stormConf, final TopologyContext context, final OutputCollector collector) {
- this.collector = collector;
- }
-
- @Override
- public void execute(final Tuple input) {
- final String[] tokens = input.getStringByField("sentence").toLowerCase().split("\\W+");
-
- for (final String token : tokens) {
- if (token.length() > 0) {
- this.collector.emit(new Values(token, 1));
- }
- }
- }
-
- @Override
- public void cleanup() {/* nothing to do */}
-
- @Override
- public void declareOutputFields(final OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields(ATTRIBUTE_WORD, ATTRIBUTE_COUNT));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/WordCountDataPojos.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/WordCountDataPojos.java
deleted file mode 100644
index 7273eca..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/WordCountDataPojos.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wordcount.operators;
-
-import org.apache.flink.storm.wordcount.util.WordCountData;
-
-import java.io.Serializable;
-
-/**
- * Input POJOs for WordCount programs.
- */
-public class WordCountDataPojos {
- public static final Sentence[] SENTENCES;
-
- static {
- SENTENCES = new Sentence[WordCountData.WORDS.length];
- for (int i = 0; i < SENTENCES.length; ++i) {
- SENTENCES[i] = new Sentence(WordCountData.WORDS[i]);
- }
- }
-
- /**
- * Simple POJO containing a string.
- */
- public static class Sentence implements Serializable {
- private static final long serialVersionUID = -7336372859203407522L;
-
- private String sentence;
-
- public Sentence() {
- }
-
- public Sentence(String sentence) {
- this.sentence = sentence;
- }
-
- public String getSentence() {
- return sentence;
- }
-
- public void setSentence(String sentence) {
- this.sentence = sentence;
- }
-
- @Override
- public String toString() {
- return "(" + this.sentence + ")";
- }
- }
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/WordCountDataTuple.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/WordCountDataTuple.java
deleted file mode 100644
index 6adfdb4..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/WordCountDataTuple.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wordcount.operators;
-
-import org.apache.flink.api.java.tuple.Tuple1;
-import org.apache.flink.storm.wordcount.util.WordCountData;
-
-/**
- * Input tuples for WordCount programs.
- */
-@SuppressWarnings("unchecked")
-public class WordCountDataTuple {
- public static final Tuple1<String>[] TUPLES;
-
- static {
- TUPLES = new Tuple1[WordCountData.WORDS.length];
- for (int i = 0; i < TUPLES.length; ++i) {
- TUPLES[i] = new Tuple1<String>(WordCountData.WORDS[i]);
- }
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/WordCountFileSpout.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/WordCountFileSpout.java
deleted file mode 100644
index 1298422..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/WordCountFileSpout.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wordcount.operators;
-
-import org.apache.flink.storm.util.FileSpout;
-
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-
-/**
- * Implements a Spout that reads data from a given local file.
- */
-public final class WordCountFileSpout extends FileSpout {
- private static final long serialVersionUID = 2372251989250954503L;
-
- public WordCountFileSpout(String path) {
- super(path);
- }
-
- @Override
- public void declareOutputFields(final OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("sentence"));
- }
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/WordCountInMemorySpout.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/WordCountInMemorySpout.java
deleted file mode 100644
index 6bb8ac1..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/operators/WordCountInMemorySpout.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wordcount.operators;
-
-import org.apache.flink.storm.util.FiniteInMemorySpout;
-import org.apache.flink.storm.wordcount.util.WordCountData;
-
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-
-/**
- * Implements a Spout that reads data from {@link WordCountData#WORDS}.
- */
-public final class WordCountInMemorySpout extends FiniteInMemorySpout {
- private static final long serialVersionUID = 8832143302409465843L;
-
- public WordCountInMemorySpout() {
- super(WordCountData.WORDS);
- }
-
- @Override
- public void declareOutputFields(final OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("sentence"));
- }
-}
diff --git a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/util/WordCountData.java b/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/util/WordCountData.java
deleted file mode 100644
index 9682072..0000000
--- a/flink-contrib/flink-storm-examples/src/main/java/org/apache/flink/storm/wordcount/util/WordCountData.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wordcount.util;
-
-/**
- * Provides the default data sets used for the WordCount example program.
- * The default data sets are used, if no parameters are given to the program.
- */
-public class WordCountData {
-
- public static final String[] WORDS = new String[]{
- "To be, or not to be,--that is the question:--",
- "Whether 'tis nobler in the mind to suffer",
- "The slings and arrows of outrageous fortune",
- "Or to take arms against a sea of troubles,",
- "And by opposing end them?--To die,--to sleep,--",
- "No more; and by a sleep to say we end",
- "The heartache, and the thousand natural shocks",
- "That flesh is heir to,--'tis a consummation",
- "Devoutly to be wish'd. To die,--to sleep;--",
- "To sleep! perchance to dream:--ay, there's the rub;",
- "For in that sleep of death what dreams may come,",
- "When we have shuffled off this mortal coil,",
- "Must give us pause: there's the respect",
- "That makes calamity of so long life;",
- "For who would bear the whips and scorns of time,",
- "The oppressor's wrong, the proud man's contumely,",
- "The pangs of despis'd love, the law's delay,",
- "The insolence of office, and the spurns",
- "That patient merit of the unworthy takes,",
- "When he himself might his quietus make",
- "With a bare bodkin? who would these fardels bear,",
- "To grunt and sweat under a weary life,",
- "But that the dread of something after death,--",
- "The undiscover'd country, from whose bourn",
- "No traveller returns,--puzzles the will,",
- "And makes us rather bear those ills we have",
- "Than fly to others that we know not of?",
- "Thus conscience does make cowards of us all;",
- "And thus the native hue of resolution",
- "Is sicklied o'er with the pale cast of thought;",
- "And enterprises of great pith and moment,",
- "With this regard, their currents turn awry,",
- "And lose the name of action.--Soft you now!",
- "The fair Ophelia!--Nymph, in thy orisons",
- "Be all my sins remember'd."
- };
-}
diff --git a/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/exclamation/ExclamationWithBoltITCase.java b/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/exclamation/ExclamationWithBoltITCase.java
deleted file mode 100644
index f0725e9..0000000
--- a/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/exclamation/ExclamationWithBoltITCase.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.exclamation;
-
-import org.apache.flink.storm.exclamation.util.ExclamationData;
-import org.apache.flink.test.testdata.WordCountData;
-import org.apache.flink.test.util.AbstractTestBase;
-
-import org.junit.Test;
-
-/**
- * Test for the ExclamationWithBolt example.
- */
-public class ExclamationWithBoltITCase extends AbstractTestBase {
-
- @Test
- public void testProgram() throws Exception {
- String textPath = createTempFile("text.txt", WordCountData.TEXT);
- String resultPath = getTempDirPath("result");
- String exclamationNum = "3";
-
- ExclamationWithBolt.main(new String[]{textPath, resultPath, exclamationNum});
-
- compareResultsByLinesInMemory(ExclamationData.TEXT_WITH_EXCLAMATIONS, resultPath);
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/exclamation/ExclamationWithSpoutITCase.java b/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/exclamation/ExclamationWithSpoutITCase.java
deleted file mode 100644
index 4d16c4a..0000000
--- a/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/exclamation/ExclamationWithSpoutITCase.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.exclamation;
-
-import org.apache.flink.storm.exclamation.util.ExclamationData;
-import org.apache.flink.test.testdata.WordCountData;
-import org.apache.flink.test.util.AbstractTestBase;
-
-import org.junit.Test;
-
-/**
- * Test for the ExclamationWithSpout example.
- */
-public class ExclamationWithSpoutITCase extends AbstractTestBase {
-
- @Test
- public void testProgram() throws Exception {
- String textPath = createTempFile("text.txt", WordCountData.TEXT);
- String resultPath = getTempDirPath("result");
-
- ExclamationWithSpout.main(new String[]{textPath, resultPath});
-
- compareResultsByLinesInMemory(ExclamationData.TEXT_WITH_EXCLAMATIONS, resultPath);
- }
-}
diff --git a/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/exclamation/util/ExclamationData.java b/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/exclamation/util/ExclamationData.java
deleted file mode 100644
index f700009..0000000
--- a/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/exclamation/util/ExclamationData.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.exclamation.util;
-
-/**
- * Expected output of Exclamation programs.
- */
-public class ExclamationData {
-
- public static final String TEXT_WITH_EXCLAMATIONS =
- "Goethe - Faust: Der Tragoedie erster Teil!!!!!!\n"
- + "Prolog im Himmel.!!!!!!\n"
- + "Der Herr. Die himmlischen Heerscharen. Nachher Mephistopheles. Die drei!!!!!!\n"
- + "Erzengel treten vor.!!!!!!\n"
- + "RAPHAEL: Die Sonne toent, nach alter Weise, In Brudersphaeren Wettgesang,!!!!!!\n"
- + "Und ihre vorgeschriebne Reise Vollendet sie mit Donnergang. Ihr Anblick!!!!!!\n"
- + "gibt den Engeln Staerke, Wenn keiner Sie ergruenden mag; die unbegreiflich!!!!!!\n"
- + "hohen Werke Sind herrlich wie am ersten Tag.!!!!!!\n"
- + "GABRIEL: Und schnell und unbegreiflich schnelle Dreht sich umher der Erde!!!!!!\n"
- + "Pracht; Es wechselt Paradieseshelle Mit tiefer, schauervoller Nacht. Es!!!!!!\n"
- + "schaeumt das Meer in breiten Fluessen Am tiefen Grund der Felsen auf, Und!!!!!!\n"
- + "Fels und Meer wird fortgerissen Im ewig schnellem Sphaerenlauf.!!!!!!\n"
- + "MICHAEL: Und Stuerme brausen um die Wette Vom Meer aufs Land, vom Land!!!!!!\n"
- + "aufs Meer, und bilden wuetend eine Kette Der tiefsten Wirkung rings umher.!!!!!!\n"
- + "Da flammt ein blitzendes Verheeren Dem Pfade vor des Donnerschlags. Doch!!!!!!\n"
- + "deine Boten, Herr, verehren Das sanfte Wandeln deines Tags.!!!!!!\n"
- + "ZU DREI: Der Anblick gibt den Engeln Staerke, Da keiner dich ergruenden!!!!!!\n"
- + "mag, Und alle deine hohen Werke Sind herrlich wie am ersten Tag.!!!!!!\n"
- + "MEPHISTOPHELES: Da du, o Herr, dich einmal wieder nahst Und fragst, wie!!!!!!\n"
- + "alles sich bei uns befinde, Und du mich sonst gewoehnlich gerne sahst, So!!!!!!\n"
- + "siehst du mich auch unter dem Gesinde. Verzeih, ich kann nicht hohe Worte!!!!!!\n"
- + "machen, Und wenn mich auch der ganze Kreis verhoehnt; Mein Pathos braechte!!!!!!\n"
- + "dich gewiss zum Lachen, Haettst du dir nicht das Lachen abgewoehnt. Von!!!!!!\n"
- + "Sonn' und Welten weiss ich nichts zu sagen, Ich sehe nur, wie sich die!!!!!!\n"
- + "Menschen plagen. Der kleine Gott der Welt bleibt stets von gleichem!!!!!!\n"
- + "Schlag, Und ist so wunderlich als wie am ersten Tag. Ein wenig besser!!!!!!\n"
- + "wuerd er leben, Haettst du ihm nicht den Schein des Himmelslichts gegeben;!!!!!!\n"
- + "Er nennt's Vernunft und braucht's allein, Nur tierischer als jedes Tier!!!!!!\n"
- + "zu sein. Er scheint mir, mit Verlaub von euer Gnaden, Wie eine der!!!!!!\n"
- + "langbeinigen Zikaden, Die immer fliegt und fliegend springt Und gleich im!!!!!!\n"
- + "Gras ihr altes Liedchen singt; Und laeg er nur noch immer in dem Grase! In!!!!!!\n"
- + "jeden Quark begraebt er seine Nase.!!!!!!\n"
- + "DER HERR: Hast du mir weiter nichts zu sagen? Kommst du nur immer!!!!!!\n"
- + "anzuklagen? Ist auf der Erde ewig dir nichts recht?!!!!!!\n"
- + "MEPHISTOPHELES: Nein Herr! ich find es dort, wie immer, herzlich!!!!!!\n"
- + "schlecht. Die Menschen dauern mich in ihren Jammertagen, Ich mag sogar!!!!!!\n"
- + "die armen selbst nicht plagen.!!!!!!\n" + "DER HERR: Kennst du den Faust?!!!!!!\n"
- + "MEPHISTOPHELES: Den Doktor?!!!!!!\n"
- + "DER HERR: Meinen Knecht!!!!!!!\n"
- + "MEPHISTOPHELES: Fuerwahr! er dient Euch auf besondre Weise. Nicht irdisch!!!!!!\n"
- + "ist des Toren Trank noch Speise. Ihn treibt die Gaerung in die Ferne, Er!!!!!!\n"
- + "ist sich seiner Tollheit halb bewusst; Vom Himmel fordert er die schoensten!!!!!!\n"
- + "Sterne Und von der Erde jede hoechste Lust, Und alle Naeh und alle Ferne!!!!!!\n"
- + "Befriedigt nicht die tiefbewegte Brust.!!!!!!\n"
- + "DER HERR: Wenn er mir auch nur verworren dient, So werd ich ihn bald in!!!!!!\n"
- + "die Klarheit fuehren. Weiss doch der Gaertner, wenn das Baeumchen gruent, Das!!!!!!\n"
- + "Bluet und Frucht die kuenft'gen Jahre zieren.!!!!!!\n"
- + "MEPHISTOPHELES: Was wettet Ihr? den sollt Ihr noch verlieren! Wenn Ihr!!!!!!\n"
- + "mir die Erlaubnis gebt, Ihn meine Strasse sacht zu fuehren.!!!!!!\n"
- + "DER HERR: Solang er auf der Erde lebt, So lange sei dir's nicht verboten,!!!!!!\n"
- + "Es irrt der Mensch so lang er strebt.!!!!!!\n"
- + "MEPHISTOPHELES: Da dank ich Euch; denn mit den Toten Hab ich mich niemals!!!!!!\n"
- + "gern befangen. Am meisten lieb ich mir die vollen, frischen Wangen. Fuer!!!!!!\n"
- + "einem Leichnam bin ich nicht zu Haus; Mir geht es wie der Katze mit der Maus.!!!!!!\n"
- + "DER HERR: Nun gut, es sei dir ueberlassen! Zieh diesen Geist von seinem!!!!!!\n"
- + "Urquell ab, Und fuehr ihn, kannst du ihn erfassen, Auf deinem Wege mit!!!!!!\n"
- + "herab, Und steh beschaemt, wenn du bekennen musst: Ein guter Mensch, in!!!!!!\n"
- + "seinem dunklen Drange, Ist sich des rechten Weges wohl bewusst.!!!!!!\n"
- + "MEPHISTOPHELES: Schon gut! nur dauert es nicht lange. Mir ist fuer meine!!!!!!\n"
- + "Wette gar nicht bange. Wenn ich zu meinem Zweck gelange, Erlaubt Ihr mir!!!!!!\n"
- + "Triumph aus voller Brust. Staub soll er fressen, und mit Lust, Wie meine!!!!!!\n"
- + "Muhme, die beruehmte Schlange.!!!!!!\n"
- + "DER HERR: Du darfst auch da nur frei erscheinen; Ich habe deinesgleichen!!!!!!\n"
- + "nie gehasst. Von allen Geistern, die verneinen, ist mir der Schalk am!!!!!!\n"
- + "wenigsten zur Last. Des Menschen Taetigkeit kann allzu leicht erschlaffen,!!!!!!\n"
- + "er liebt sich bald die unbedingte Ruh; Drum geb ich gern ihm den Gesellen!!!!!!\n"
- + "zu, Der reizt und wirkt und muss als Teufel schaffen. Doch ihr, die echten!!!!!!\n"
- + "Goettersoehne, Erfreut euch der lebendig reichen Schoene! Das Werdende, das!!!!!!\n"
- + "ewig wirkt und lebt, Umfass euch mit der Liebe holden Schranken, Und was!!!!!!\n"
- + "in schwankender Erscheinung schwebt, Befestigt mit dauernden Gedanken!!!!!!!\n"
- + "(Der Himmel schliesst, die Erzengel verteilen sich.)!!!!!!\n"
- + "MEPHISTOPHELES (allein): Von Zeit zu Zeit seh ich den Alten gern, Und!!!!!!\n"
- + "huete mich, mit ihm zu brechen. Es ist gar huebsch von einem grossen Herrn,!!!!!!\n"
- + "So menschlich mit dem Teufel selbst zu sprechen.!!!!!!";
-}
diff --git a/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/split/SplitITCase.java b/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/split/SplitITCase.java
deleted file mode 100644
index 944000c..0000000
--- a/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/split/SplitITCase.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.split;
-
-import org.apache.flink.storm.split.SpoutSplitExample.Enrich;
-import org.apache.flink.storm.split.operators.VerifyAndEnrichBolt;
-import org.apache.flink.test.util.AbstractTestBase;
-
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * Tests for split examples.
- */
-public class SplitITCase extends AbstractTestBase {
-
- private String output;
-
- @Before
- public void prepare() throws IOException {
- output = getTempFilePath("dummy").split(":")[1];
- }
-
- @After
- public void cleanUp() throws IOException {
- deleteRecursively(new File(output));
- }
-
- @Test
- public void testEmbeddedSpout() throws Exception {
- SpoutSplitExample.main(new String[] { "0", output });
- Assert.assertFalse(VerifyAndEnrichBolt.errorOccured);
- Assert.assertFalse(Enrich.errorOccured);
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountITCase.java b/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountITCase.java
deleted file mode 100644
index 57e5d42..0000000
--- a/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountITCase.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wordcount;
-
-import org.apache.flink.test.testdata.WordCountData;
-import org.apache.flink.test.util.AbstractTestBase;
-
-import org.junit.Test;
-
-/**
- * Test for the BoltTokenizerWordCount example.
- */
-public class BoltTokenizerWordCountITCase extends AbstractTestBase {
-
- @Test
- public void testProgram() throws Exception {
- String textPath = createTempFile("text.txt", WordCountData.TEXT);
- String resultPath = getTempDirPath("result");
-
- BoltTokenizerWordCount.main(new String[]{textPath, resultPath});
-
- compareResultsByLinesInMemory(WordCountData.STREAMING_COUNTS_AS_TUPLES, resultPath);
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountPojoITCase.java b/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountPojoITCase.java
deleted file mode 100644
index 656700e..0000000
--- a/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountPojoITCase.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wordcount;
-
-import org.apache.flink.test.testdata.WordCountData;
-import org.apache.flink.test.util.AbstractTestBase;
-
-import org.junit.Test;
-
-/**
- * Test for the BoltTokenizerWordCountPojo example.
- */
-public class BoltTokenizerWordCountPojoITCase extends AbstractTestBase {
-
- @Test
- public void testProgram() throws Exception {
- String textPath = createTempFile("text.txt", WordCountData.TEXT);
- String resultPath = getTempDirPath("result");
-
- BoltTokenizerWordCountPojo.main(new String[]{textPath, resultPath});
-
- compareResultsByLinesInMemory(WordCountData.STREAMING_COUNTS_AS_TUPLES, resultPath);
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountWithNamesITCase.java b/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountWithNamesITCase.java
deleted file mode 100644
index 18e1f01..0000000
--- a/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/wordcount/BoltTokenizerWordCountWithNamesITCase.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wordcount;
-
-import org.apache.flink.test.testdata.WordCountData;
-import org.apache.flink.test.util.AbstractTestBase;
-
-import org.junit.Test;
-
-/**
- * Test for the BoltTokenizerWordCountWithNames example.
- */
-public class BoltTokenizerWordCountWithNamesITCase extends AbstractTestBase {
-
- @Test
- public void testProgram() throws Exception {
- String textPath = createTempFile("text.txt", WordCountData.TEXT);
- String resultPath = getTempDirPath("result");
-
- BoltTokenizerWordCountWithNames.main(new String[]{textPath, resultPath});
-
- compareResultsByLinesInMemory(WordCountData.STREAMING_COUNTS_AS_TUPLES, resultPath);
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/wordcount/SpoutSourceWordCountITCase.java b/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/wordcount/SpoutSourceWordCountITCase.java
deleted file mode 100644
index 594f56e..0000000
--- a/flink-contrib/flink-storm-examples/src/test/java/org/apache/flink/storm/wordcount/SpoutSourceWordCountITCase.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wordcount;
-
-import org.apache.flink.test.testdata.WordCountData;
-import org.apache.flink.test.util.AbstractTestBase;
-
-import org.junit.Test;
-
-/**
- * Test for the SpoutSourceWordCount example.
- */
-public class SpoutSourceWordCountITCase extends AbstractTestBase {
-
- @Test
- public void testProgram() throws Exception {
- String textPath = createTempFile("text.txt", WordCountData.TEXT);
- String resultPath = getTempDirPath("result");
-
- SpoutSourceWordCount.main(new String[]{textPath, resultPath});
-
- compareResultsByLinesInMemory(WordCountData.STREAMING_COUNTS_AS_TUPLES, resultPath);
- }
-
-}
diff --git a/flink-contrib/flink-storm-examples/src/test/resources/log4j-test.properties b/flink-contrib/flink-storm-examples/src/test/resources/log4j-test.properties
deleted file mode 100644
index 881dc06..0000000
--- a/flink-contrib/flink-storm-examples/src/test/resources/log4j-test.properties
+++ /dev/null
@@ -1,27 +0,0 @@
-################################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-################################################################################
-
-# Set root logger level to OFF and its only appender to A1.
-log4j.rootLogger=OFF, A1
-
-# A1 is set to be a ConsoleAppender.
-log4j.appender.A1=org.apache.log4j.ConsoleAppender
-
-# A1 uses PatternLayout.
-log4j.appender.A1.layout=org.apache.log4j.PatternLayout
-log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
diff --git a/flink-contrib/flink-storm/README.md b/flink-contrib/flink-storm/README.md
deleted file mode 100644
index 578d6cc..0000000
--- a/flink-contrib/flink-storm/README.md
+++ /dev/null
@@ -1,16 +0,0 @@
-# flink-storm
-
-`flink-storm` is a compatibility layer for Apache Storm that allows embedding unmodified Spouts and Bolts in a regular Flink streaming program (via `SpoutWrapper` and `BoltWrapper`).
-Additionally, a whole Storm topology can be submitted to Flink (see `FlinkLocalCluster` and `FlinkSubmitter`).
-Only a few minor changes to the original submission code are required.
-The code that builds the topology itself can be reused unmodified. See `flink-storm-examples` for a simple word-count example.
-
-**Please note**: Do not add `storm-core` as a dependency. It is already included via `flink-storm`.
-
-The following Storm features are not (yet fully) supported by the compatibility layer:
-* no fault-tolerance guarantees (i.e., calls to `ack()`/`fail()` and anchoring are ignored)
-* for whole Storm topologies the following is not supported by Flink:
- * direct emit connection pattern
- * activating/deactivating and rebalancing of topologies
- * task hooks
- * metrics
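For context, embedding a spout was similarly compact. A sketch based on the examples removed in this commit (SpoutWrapper and WordCountFileSpout are both part of the deleted code; the deleted sources also offered constructor variants that select specific output streams):

    // Use an unmodified Storm spout as a Flink source (sketch).
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStream<String> lines = env.addSource(
            new SpoutWrapper<String>(new WordCountFileSpout(textPath)),
            TypeExtractor.getForClass(String.class));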
diff --git a/flink-contrib/flink-storm/pom.xml b/flink-contrib/flink-storm/pom.xml
deleted file mode 100644
index 6b6f07e..0000000
--- a/flink-contrib/flink-storm/pom.xml
+++ /dev/null
@@ -1,193 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied. See the License for the
-specific language governing permissions and limitations
-under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-contrib</artifactId>
- <version>1.8-SNAPSHOT</version>
- <relativePath>..</relativePath>
- </parent>
-
- <artifactId>flink-storm_${scala.binary.version}</artifactId>
- <name>flink-storm</name>
-
- <packaging>jar</packaging>
-
-
- <repositories>
- <!-- This repository is needed as a stable source for some Clojure libraries -->
- <repository>
- <id>clojars</id>
- <url>https://clojars.org/repo/</url>
- <releases>
- <enabled>true</enabled>
- </releases>
- <snapshots>
- <enabled>false</enabled>
- </snapshots>
- </repository>
- </repositories>
-
-
- <dependencies>
-
- <!-- core dependencies -->
-
- <!-- Together with the dependency management section in flink-parent, this
- pins the Kryo version of transitive dependencies to the Flink Kryo version -->
- <dependency>
- <groupId>com.esotericsoftware.kryo</groupId>
- <artifactId>kryo</artifactId>
- <scope>provided</scope>
- </dependency>
-
- <!-- Core streaming API -->
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
- <version>${project.version}</version>
- <scope>provided</scope>
- </dependency>
-
- <!-- we only need the Apache Storm API, not all the runtime and web UI functionality,
- so we exclude many of the unnecessary and possibly conflicting dependencies -->
- <dependency>
- <groupId>org.apache.storm</groupId>
- <artifactId>storm-core</artifactId>
- <version>1.0.0</version>
- <exclusions>
- <exclusion>
- <groupId>org.slf4j</groupId>
- <artifactId>log4j-over-slf4j</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.logging.log4j</groupId>
- <artifactId>log4j-slf4j-impl</artifactId>
- </exclusion>
- <exclusion>
- <artifactId>slf4j-log4j12</artifactId>
- <groupId>org.slf4j</groupId>
- </exclusion>
- <exclusion>
- <groupId>javax.servlet</groupId>
- <artifactId>servlet-api</artifactId>
- </exclusion>
- <exclusion>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.mockito</groupId>
- <artifactId>mockito-all</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.curator</groupId>
- <artifactId>curator-test</artifactId>
- </exclusion>
- <exclusion>
- <groupId>com.esotericsoftware</groupId>
- <artifactId>kryo</artifactId>
- </exclusion>
- <exclusion>
- <groupId>ring</groupId>
- <artifactId>ring-core</artifactId>
- </exclusion>
- <exclusion>
- <groupId>ring</groupId>
- <artifactId>ring-devel</artifactId>
- </exclusion>
- <exclusion>
- <groupId>ring</groupId>
- <artifactId>ring-servlet</artifactId>
- </exclusion>
- <exclusion>
- <groupId>ring</groupId>
- <artifactId>ring-jetty-adapter</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.mortbay.jetty</groupId>
- <artifactId>jetty</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.jgrapht</groupId>
- <artifactId>jgrapht-core</artifactId>
- </exclusion>
- <exclusion>
- <groupId>compojure</groupId>
- <artifactId>compojure</artifactId>
- </exclusion>
- <exclusion>
- <groupId>com.twitter</groupId>
- <artifactId>chill-java</artifactId>
- </exclusion>
- <exclusion>
- <groupId>commons-fileupload</groupId>
- <artifactId>commons-fileupload</artifactId>
- </exclusion>
- <exclusion>
- <groupId>javax.servlet</groupId>
- <artifactId>servlet-api</artifactId>
- </exclusion>
- <exclusion>
- <groupId>clout</groupId>
- <artifactId>clout</artifactId>
- </exclusion>
- <exclusion>
- <groupId>hiccup</groupId>
- <artifactId>hiccup</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
- <dependency>
- <groupId>com.googlecode.json-simple</groupId>
- <artifactId>json-simple</artifactId>
- <version>1.1</version>
- </dependency>
-
- <!-- test dependencies -->
-
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-runtime_${scala.binary.version}</artifactId>
- <version>${project.version}</version>
- <type>test-jar</type>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.flink</groupId>
- <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
- <version>${project.version}</version>
- <scope>test</scope>
- <type>test-jar</type>
- </dependency>
-
- </dependencies>
-
-</project>
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/FiniteSpout.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/FiniteSpout.java
deleted file mode 100644
index 7615b2e..0000000
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/FiniteSpout.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.storm.topology.IRichSpout;
-
-/**
- * This interface represents a spout that emits a finite number of records. Common spouts emit infinite streams by
- * default. To change this behavior and take advantage of Flink's finite-source capabilities, the spout should implement
- * this interface.
- */
-public interface FiniteSpout extends IRichSpout {
-
- /**
- * When this method returns true, the spout has reached the end of its stream.
- *
- * @return true if the spout's stream has reached its end, false otherwise
- */
- boolean reachedEnd();
-
-}
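A minimal FiniteSpout implementation, for illustration (a sketch modeled on the removed in-memory spouts: it emits each element of an array exactly once and then signals end-of-stream; only reachedEnd() is specific to this interface):

    import org.apache.storm.spout.SpoutOutputCollector;
    import org.apache.storm.task.TopologyContext;
    import org.apache.storm.topology.OutputFieldsDeclarer;
    import org.apache.storm.tuple.Fields;
    import org.apache.storm.tuple.Values;

    import java.util.Map;

    public class ArraySpout implements FiniteSpout {
        private static final long serialVersionUID = 1L;

        private final String[] lines;
        private int next = 0;
        private SpoutOutputCollector collector;

        public ArraySpout(final String[] lines) {
            this.lines = lines;
        }

        @Override
        public void open(@SuppressWarnings("rawtypes") final Map conf,
                final TopologyContext context, final SpoutOutputCollector collector) {
            this.collector = collector;
        }

        @Override
        public void nextTuple() {
            if (!reachedEnd()) {
                this.collector.emit(new Values(this.lines[this.next++]));
            }
        }

        @Override
        public boolean reachedEnd() {
            // Flink shuts the source down cleanly once this returns true.
            return this.next >= this.lines.length;
        }

        @Override
        public void declareOutputFields(final OutputFieldsDeclarer declarer) {
            declarer.declare(new Fields("line"));
        }

        @Override
        public void close() {/* nothing to do */}

        @Override
        public void activate() {/* nothing to do */}

        @Override
        public void deactivate() {/* nothing to do */}

        @Override
        public void ack(final Object msgId) {/* nothing to do */}

        @Override
        public void fail(final Object msgId) {/* nothing to do */}

        @Override
        public Map<String, Object> getComponentConfiguration() {
            return null;
        }
    }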
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/NullTerminatingSpout.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/NullTerminatingSpout.java
deleted file mode 100644
index a830b10..0000000
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/NullTerminatingSpout.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichSpout;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-
-import java.util.Map;
-
-/**
- * {@link NullTerminatingSpout} is a finite spout (i.e., it implements the {@link FiniteSpout} interface) that wraps
- * an infinite spout and returns {@code true} in {@link #reachedEnd()} the first time the wrapped spout does not emit
- * a tuple in {@code nextTuple()}.
- */
-public class NullTerminatingSpout implements FiniteSpout {
- private static final long serialVersionUID = -6976210409932076066L;
-
- /** The original infinite Spout. */
- private final IRichSpout spout;
- /** The observer that checks whether the wrapped spout emits a tuple on nextTuple(). */
- private SpoutOutputCollectorObserver observer;
-
- public NullTerminatingSpout(IRichSpout spout) {
- this.spout = spout;
- }
-
- @Override
- public void open(@SuppressWarnings("rawtypes") Map conf, TopologyContext context, SpoutOutputCollector collector) {
- this.observer = new SpoutOutputCollectorObserver(collector);
- this.observer.emitted = true;
- this.spout.open(conf, context, this.observer);
- }
-
- @Override
- public void close() {
- this.spout.close();
- }
-
- @Override
- public void activate() {
- this.spout.activate();
- }
-
- @Override
- public void deactivate() {
- this.spout.deactivate();
- }
-
- @Override
- public void nextTuple() {
- this.observer.emitted = false;
- this.spout.nextTuple();
- }
-
- @Override
- public void ack(Object msgId) {
- this.spout.ack(msgId);
- }
-
- @Override
- public void fail(Object msgId) {
- this.spout.fail(msgId);
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- this.spout.declareOutputFields(declarer);
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return this.spout.getComponentConfiguration();
- }
-
- @Override
- public boolean reachedEnd() {
- return !this.observer.emitted;
- }
-
-}
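Usage was a one-line wrap (sketch; MyInfiniteSpout stands in for any user-defined IRichSpout):

    // The resulting spout ends its stream the first time the wrapped
    // spout's nextTuple() emits nothing.
    FiniteSpout finite = new NullTerminatingSpout(new MyInfiniteSpout());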
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/SplitStreamMapper.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/SplitStreamMapper.java
deleted file mode 100644
index d2e84c5..0000000
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/SplitStreamMapper.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.flink.api.common.functions.MapFunction;
-import org.apache.flink.streaming.api.datastream.DataStream;
-import org.apache.flink.streaming.api.datastream.SplitStream;
-
-/**
- * Strips {@link SplitStreamType}{@code <T>} away, i.e., extracts the wrapped record of type {@code T}. Can be used to get
- * a "clean" stream from a Spout/Bolt that declared multiple output streams (after the streams have been separated using
- * {@link DataStream#split(org.apache.flink.streaming.api.collector.selector.OutputSelector) .split(...)} and
- * {@link SplitStream#select(String...) .select(...)}).
- *
- * @param <T> the type of the wrapped record
- */
-public class SplitStreamMapper<T> implements MapFunction<SplitStreamType<T>, T> {
- private static final long serialVersionUID = 3550359150160908564L;
-
- @Override
- public T map(SplitStreamType<T> value) throws Exception {
- return value.value;
- }
-
-}
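Putting the pieces together, a sketch of the split/select/map pattern described above (the stream name "even" is an example taken from the removed SpoutSplitExample; StormStreamSelector is the selector shipped with the deleted module):

    SplitStream<SplitStreamType<Integer>> splits =
            rawOutput.split(new StormStreamSelector<Integer>());
    // Select one declared Storm output stream, then strip the wrapper
    // to obtain a plain DataStream<Integer>.
    DataStream<Integer> evenStream = splits
            .select("even")
            .map(new SplitStreamMapper<Integer>());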
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/SplitStreamType.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/SplitStreamType.java
deleted file mode 100644
index 36894c7..0000000
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/SplitStreamType.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.flink.streaming.api.datastream.DataStream;
-
-/**
- * Used by org.apache.flink.storm.wrappers.AbstractStormCollector to wrap
- * output tuples if multiple output streams are declared. In this case, the Flink output data stream must be split via
- * {@link DataStream#split(org.apache.flink.streaming.api.collector.selector.OutputSelector) .split(...)} using
- * {@link StormStreamSelector}.
- *
- */
-public class SplitStreamType<T> {
-
- /** The stream ID this tuple belongs to. */
- public String streamId;
- /** The actual data value. */
- public T value;
-
- @Override
- public String toString() {
- return "<sid:" + this.streamId + ",v:" + this.value + ">";
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) {
- return true;
- }
- if (o == null || getClass() != o.getClass()) {
- return false;
- }
- SplitStreamType<?> other = (SplitStreamType<?>) o;
-
- return this.streamId.equals(other.streamId) && this.value.equals(other.value);
- }
-}
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/SpoutOutputCollectorObserver.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/SpoutOutputCollectorObserver.java
deleted file mode 100644
index 8be466e..0000000
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/SpoutOutputCollectorObserver.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.utils.Utils;
-
-import java.util.List;
-
-/**
- * Observes whether a call to any {@code emit(...)} or {@code emitDirect(...)} method is made.
- * The internal flag {@link #emitted} must be reset by the user manually.
- */
-class SpoutOutputCollectorObserver extends SpoutOutputCollector {
-
- /** The collector to be observed. */
- private final SpoutOutputCollector delegate;
- /** The internal flag that is set to {@code true} if a tuple gets emitted. */
- boolean emitted;
-
- public SpoutOutputCollectorObserver(SpoutOutputCollector delegate) {
- super(null);
- this.delegate = delegate;
- }
-
- @Override
- public List<Integer> emit(String streamId, List<Object> tuple, Object messageId) {
- emitted = true;
- return this.delegate.emit(streamId, tuple, messageId);
- }
-
- @Override
- public List<Integer> emit(List<Object> tuple, Object messageId) {
- return emit(Utils.DEFAULT_STREAM_ID, tuple, messageId);
- }
-
- @Override
- public List<Integer> emit(List<Object> tuple) {
- return emit(tuple, null);
- }
-
- @Override
- public List<Integer> emit(String streamId, List<Object> tuple) {
- return emit(streamId, tuple, null);
- }
-
- @Override
- public void emitDirect(int taskId, String streamId, List<Object> tuple, Object messageId) {
- emitted = true;
- delegate.emitDirect(taskId, streamId, tuple, messageId);
- }
-
- @Override
- public void emitDirect(int taskId, List<Object> tuple, Object messageId) {
- emitDirect(taskId, Utils.DEFAULT_STREAM_ID, tuple, messageId);
- }
-
- @Override
- public void emitDirect(int taskId, String streamId, List<Object> tuple) {
- emitDirect(taskId, streamId, tuple, null);
- }
-
- @Override
- public void emitDirect(int taskId, List<Object> tuple) {
- emitDirect(taskId, tuple, null);
- }
-
- @Override
- public void reportError(Throwable error) {
- delegate.reportError(error);
- }
-
-}
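
A minimal sketch of how this observer was used (e.g., by NullTerminatingSpout, also removed by this commit) to detect an exhausted spout; `spout`, `conf`, `context`, and `delegate` are assumed to be in scope:

    SpoutOutputCollectorObserver observer = new SpoutOutputCollectorObserver(delegate);
    spout.open(conf, context, observer);   // the spout now emits through the observer
    observer.emitted = false;              // reset the flag manually before each call
    spout.nextTuple();
    boolean exhausted = !observer.emitted; // no emit(...)/emitDirect(...) happened
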
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/StormConfig.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/StormConfig.java
deleted file mode 100644
index a9d7bfd..0000000
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/StormConfig.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.flink.api.common.ExecutionConfig.GlobalJobParameters;
-
-import org.apache.storm.Config;
-
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * {@link StormConfig} is used to provide a user-defined Storm configuration (ie, a raw {@link Map} or {@link Config}
- * object) for embedded Spouts and Bolts.
- */
-@SuppressWarnings("rawtypes")
-public final class StormConfig extends GlobalJobParameters implements Map {
- private static final long serialVersionUID = 8019519109673698490L;
-
- /** Contains the actual configuration that is provided to Spouts and Bolts. */
- private final Map config = new HashMap();
-
- /**
- * Creates an empty configuration.
- */
- public StormConfig() {
- }
-
- /**
-	 * Creates a configuration with initial values provided by the given {@code Map}.
- *
- * @param config
- * Initial values for this configuration.
- */
- @SuppressWarnings("unchecked")
- public StormConfig(Map config) {
- this.config.putAll(config);
- }
-
- @Override
- public int size() {
- return this.config.size();
- }
-
- @Override
- public boolean isEmpty() {
- return this.config.isEmpty();
- }
-
- @Override
- public boolean containsKey(Object key) {
- return this.config.containsKey(key);
- }
-
- @Override
- public boolean containsValue(Object value) {
- return this.config.containsValue(value);
- }
-
- @Override
- public Object get(Object key) {
- return this.config.get(key);
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public Object put(Object key, Object value) {
- return this.config.put(key, value);
- }
-
- @Override
- public Object remove(Object key) {
- return this.config.remove(key);
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public void putAll(Map m) {
- this.config.putAll(m);
- }
-
- @Override
- public void clear() {
- this.config.clear();
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public Set<Object> keySet() {
- return this.config.keySet();
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public Collection<Object> values() {
- return this.config.values();
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public Set<java.util.Map.Entry<Object, Object>> entrySet() {
- return this.config.entrySet();
- }
-
-}
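
A minimal sketch of how a StormConfig was handed to embedded spouts and bolts through Flink's global job parameters (the config entry is illustrative; BoltWrapper#open below shows the reading side):

    StormConfig stormConfig = new StormConfig();
    stormConfig.put(Config.TOPOLOGY_NAME, "embedded-topology"); // any Storm config entry

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().setGlobalJobParameters(stormConfig);        // passed to open()/prepare() at runtime
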
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/StormStreamSelector.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/StormStreamSelector.java
deleted file mode 100644
index e800726..0000000
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/util/StormStreamSelector.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.flink.streaming.api.collector.selector.OutputSelector;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-
-/**
- * Used to split multiple declared output streams within Flink.
- */
-public final class StormStreamSelector<T> implements OutputSelector<SplitStreamType<T>> {
- private static final long serialVersionUID = 2553423379715401023L;
-
-	/** Internal cache to avoid short-lived ArrayList objects. */
- private final HashMap<String, List<String>> streams = new HashMap<String, List<String>>();
-
- @Override
- public Iterable<String> select(SplitStreamType<T> value) {
- String sid = value.streamId;
- List<String> streamId = this.streams.get(sid);
- if (streamId == null) {
- streamId = new ArrayList<String>(1);
- streamId.add(sid);
- this.streams.put(sid, streamId);
- }
- return streamId;
- }
-
-}
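
A minimal sketch of applying the selector to a bolt output with multiple declared streams; `boltOutput` and the stream name "odd" are hypothetical, and SplitStreamMapper (also removed by this commit) unwraps the payload:

    SplitStream<SplitStreamType<Integer>> splits =
            boltOutput.split(new StormStreamSelector<Integer>()); // routes by SplitStreamType.streamId
    DataStream<Integer> odd = splits
            .select("odd")                                        // one declared Storm stream
            .map(new SplitStreamMapper<Integer>());               // SplitStreamType -> plain value
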
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/AbstractStormCollector.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/AbstractStormCollector.java
deleted file mode 100644
index 21ce115..0000000
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/AbstractStormCollector.java
+++ /dev/null
@@ -1,171 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wrappers;
-
-import org.apache.flink.api.java.tuple.Tuple;
-import org.apache.flink.api.java.tuple.Tuple0;
-import org.apache.flink.api.java.tuple.Tuple25;
-import org.apache.flink.storm.util.SplitStreamType;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map.Entry;
-
-/**
- * An {@link AbstractStormCollector} transforms Storm tuples into Flink tuples.
- */
-abstract class AbstractStormCollector<OUT> {
-
- /** Flink output tuple of concrete type {@link Tuple0} to {@link Tuple25} per output stream. */
- protected final HashMap<String, Tuple> outputTuple = new HashMap<String, Tuple>();
- /** Flink split tuple. Used, if multiple output streams are declared. */
- private final SplitStreamType<Object> splitTuple = new SplitStreamType<Object>();
- /**
- * The number of attributes of the output tuples per stream. (Determines the concrete type of {@link #outputTuple}).
-	 * If the number of attributes for a stream is negative, {@link #outputTuple} is not used and the raw data type is used.
- */
- protected final HashMap<String, Integer> numberOfAttributes;
-	/** Indicates if multiple output streams are declared and thus {@link SplitStreamType} must be used as output. */
- private final boolean split;
- /** The ID of the producer task. */
- private final int taskId;
- /** Is set to {@code true} each time a tuple is emitted. */
- boolean tupleEmitted = false;
-
- /**
- * Instantiates a new {@link AbstractStormCollector} that emits Flink tuples via {@link #doEmit(Object)}. If the
- * number of attributes is negative, any output type is supported (ie, raw type). If the number of attributes is
- * between 0 and 25, the output type is {@link Tuple0} to {@link Tuple25}, respectively.
- *
- * @param numberOfAttributes
- * The number of attributes of the emitted tuples per output stream.
- * @param taskId
- * The ID of the producer task (negative value for unknown).
- * @throws UnsupportedOperationException
- * if the specified number of attributes is greater than 25 or taskId support is enabled for a raw
- * stream
- */
- AbstractStormCollector(final HashMap<String, Integer> numberOfAttributes, final int taskId)
- throws UnsupportedOperationException {
- assert (numberOfAttributes != null);
-
- this.numberOfAttributes = numberOfAttributes;
- this.split = this.numberOfAttributes.size() > 1;
- this.taskId = taskId;
-
- for (Entry<String, Integer> outputStream : numberOfAttributes.entrySet()) {
- int numAtt = outputStream.getValue();
-
- if (this.taskId >= 0) {
- if (numAtt < 0) {
- throw new UnsupportedOperationException(
- "Task ID transmission not supported for raw streams: "
- + outputStream.getKey());
- }
- ++numAtt;
- }
-
- if (numAtt > 25) {
- if (this.taskId >= 0) {
- throw new UnsupportedOperationException(
-						"Flink cannot handle more than 25 attributes, but " + numAtt + " (" + (numAtt - 1)
-								+ " plus 1 for producer task ID) are declared for stream '"
-								+ outputStream.getKey() + "' by the given bolt.");
- } else {
- throw new UnsupportedOperationException(
-						"Flink cannot handle more than 25 attributes, but " + numAtt
- + " are declared for stream '" + outputStream.getKey() + "' by the given bolt.");
- }
- } else if (numAtt >= 0) {
- try {
- this.outputTuple.put(outputStream.getKey(),
- org.apache.flink.api.java.tuple.Tuple.getTupleClass(numAtt)
- .newInstance());
- } catch (final InstantiationException e) {
- throw new RuntimeException(e);
- } catch (final IllegalAccessException e) {
- throw new RuntimeException(e);
- }
-
- }
- }
- }
-
- /**
- * Transforms a Storm tuple into a Flink tuple of type {@code OUT} and emits this tuple via {@link #doEmit(Object)}
- * to the specified output stream.
- *
-	 * @param streamId
-	 *            The output stream ID.
- * @param tuple
- * The Storm tuple to be emitted.
- * @return the return value of {@link #doEmit(Object)}
- */
- @SuppressWarnings("unchecked")
- protected final List<Integer> tansformAndEmit(final String streamId, final List<Object> tuple) {
- List<Integer> taskIds;
-
- int numAtt = this.numberOfAttributes.get(streamId);
- int taskIdIdx = numAtt;
- if (this.taskId >= 0 && numAtt < 0) {
- numAtt = 1;
- taskIdIdx = 0;
- }
- if (numAtt >= 0) {
- assert (tuple.size() == numAtt);
- Tuple out = this.outputTuple.get(streamId);
- for (int i = 0; i < numAtt; ++i) {
- out.setField(tuple.get(i), i);
- }
- if (this.taskId >= 0) {
- out.setField(this.taskId, taskIdIdx);
- }
- if (this.split) {
- this.splitTuple.streamId = streamId;
- this.splitTuple.value = out;
-
- taskIds = doEmit((OUT) this.splitTuple);
- } else {
- taskIds = doEmit((OUT) out);
- }
-
- } else {
- assert (tuple.size() == 1);
- if (this.split) {
- this.splitTuple.streamId = streamId;
- this.splitTuple.value = tuple.get(0);
-
- taskIds = doEmit((OUT) this.splitTuple);
- } else {
- taskIds = doEmit((OUT) tuple.get(0));
- }
- }
- this.tupleEmitted = true;
-
- return taskIds;
- }
-
- /**
- * Emits a Flink tuple.
- *
- * @param flinkTuple
- * The tuple to be emitted.
- * @return the IDs of the tasks this tuple was sent to
- */
- protected abstract List<Integer> doEmit(OUT flinkTuple);
-
-}
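
To illustrate what tansformAndEmit does for a stream with a declared attribute count of 2: the Storm tuple's fields are copied one by one into a matching Flink Tuple2. A standalone sketch of that core step (not the removed class itself):

    List<Object> stormTuple = new Values("flink", 1);   // org.apache.storm.tuple.Values is a List<Object>
    Tuple2<Object, Object> flinkTuple = new Tuple2<Object, Object>();
    for (int i = 0; i < 2; i++) {
        flinkTuple.setField(stormTuple.get(i), i);      // field-by-field copy
    }
    // a negative attribute count would instead emit stormTuple.get(0) as a raw value
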
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/BoltCollector.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/BoltCollector.java
deleted file mode 100644
index 82c7be3..0000000
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/BoltCollector.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wrappers;
-
-import org.apache.flink.api.java.tuple.Tuple0;
-import org.apache.flink.api.java.tuple.Tuple25;
-import org.apache.flink.streaming.api.operators.Output;
-import org.apache.flink.util.Collector;
-
-import org.apache.storm.task.IOutputCollector;
-import org.apache.storm.tuple.Tuple;
-
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-
-/**
- * A {@link BoltCollector} is used by {@link BoltWrapper} to provide a Storm-compatible
- * output collector to the wrapped bolt. It transforms the emitted Storm tuples into Flink tuples
- * and emits them via the provided {@link Output} object.
- */
-class BoltCollector<OUT> extends AbstractStormCollector<OUT> implements IOutputCollector {
-
- /** The Flink output Collector. */
- private final Collector<OUT> flinkOutput;
-
- /**
- * Instantiates a new {@link BoltCollector} that emits Flink tuples to the given Flink output object. If the
- * number of attributes is negative, any output type is supported (ie, raw type). If the number of attributes is
- * between 0 and 25, the output type is {@link Tuple0} to {@link Tuple25}, respectively.
- *
- * @param numberOfAttributes
- * The number of attributes of the emitted tuples per output stream.
- * @param taskId
- * The ID of the producer task (negative value for unknown).
- * @param flinkOutput
- * The Flink output object to be used.
- * @throws UnsupportedOperationException
- * if the specified number of attributes is greater than 25
- */
- BoltCollector(final HashMap<String, Integer> numberOfAttributes, final int taskId,
- final Collector<OUT> flinkOutput) throws UnsupportedOperationException {
- super(numberOfAttributes, taskId);
- assert (flinkOutput != null);
- this.flinkOutput = flinkOutput;
- }
-
- @Override
- protected List<Integer> doEmit(final OUT flinkTuple) {
- this.flinkOutput.collect(flinkTuple);
- // TODO
- return null;
- }
-
- @Override
- public void reportError(final Throwable error) {
-		// not sure if Flink can support this
- }
-
- @Override
- public List<Integer> emit(final String streamId, final Collection<Tuple> anchors, final List<Object> tuple) {
- return this.tansformAndEmit(streamId, tuple);
- }
-
- @Override
- public void emitDirect(final int taskId, final String streamId, final Collection<Tuple> anchors, final List<Object> tuple) {
- throw new UnsupportedOperationException("Direct emit is not supported by Flink");
- }
-
- @Override
- public void ack(final Tuple input) {}
-
- @Override
- public void fail(final Tuple input) {}
-
- @Override
- public void resetTimeout(Tuple var1) {}
-
-}
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/BoltWrapper.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/BoltWrapper.java
deleted file mode 100644
index 1c12290..0000000
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/BoltWrapper.java
+++ /dev/null
@@ -1,275 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wrappers;
-
-import org.apache.flink.api.common.ExecutionConfig.GlobalJobParameters;
-import org.apache.flink.api.java.tuple.Tuple0;
-import org.apache.flink.api.java.tuple.Tuple1;
-import org.apache.flink.api.java.tuple.Tuple25;
-import org.apache.flink.storm.util.StormConfig;
-import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
-import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
-import org.apache.flink.streaming.api.operators.TimestampedCollector;
-import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
-
-import org.apache.storm.task.OutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichBolt;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.MessageId;
-import org.apache.storm.utils.Utils;
-
-import java.util.Collection;
-import java.util.HashMap;
-
-import static java.util.Arrays.asList;
-
-/**
- * A {@link BoltWrapper} wraps an {@link IRichBolt} in order to execute the Storm bolt within a Flink Streaming program.
- * It takes the Flink input tuples of type {@code IN} and transforms them into {@link StormTuple}s that the bolt can
- * process. Furthermore, it takes the bolt's output tuples and transforms them into Flink tuples of type {@code OUT}
- * (see {@link AbstractStormCollector} for supported types).<br/>
- * <br/>
- * <strong>Works for single input streams only! See {@link MergedInputsBoltWrapper} for multi-input stream
- * Bolts.</strong>
- */
-public class BoltWrapper<IN, OUT> extends AbstractStreamOperator<OUT> implements OneInputStreamOperator<IN, OUT> {
- private static final long serialVersionUID = -4788589118464155835L;
-
- /** The default input component ID. */
- public static final String DEFAULT_ID = "default ID";
- /** The default bolt ID. */
- public static final String DEFUALT_BOLT_NAME = "Unnamed Bolt";
-
- /** The wrapped Storm {@link IRichBolt bolt}. */
- protected final IRichBolt bolt;
- /** The name of the bolt. */
- private final String name;
- /** Number of attributes of the bolt's output tuples per stream. */
- private final HashMap<String, Integer> numberOfAttributes;
-
- /** The topology context of the bolt. */
- private transient TopologyContext topologyContext;
-
- /** The schema (ie, ordered field names) of the input streams per producer taskID. */
- private final HashMap<Integer, Fields> inputSchemas = new HashMap<Integer, Fields>();
-
- /**
- * We have to use this because Operators must output {@link StreamRecord}.
- */
- protected transient TimestampedCollector<OUT> flinkCollector;
-
- /**
- * Instantiates a new {@link BoltWrapper} that wraps the given Storm {@link IRichBolt bolt} such that it can be used
-	 * within a Flink streaming program. As no input schema is defined, attribute-by-name access is only possible for
- * POJO input types. The output type will be one of {@link Tuple0} to {@link Tuple25} depending on the bolt's
- * declared number of attributes.
- *
- * @param bolt
- * The Storm {@link IRichBolt bolt} to be used.
- * @throws IllegalArgumentException
-	 *             If the number of declared output attributes is not within range [0;25].
- */
- public BoltWrapper(final IRichBolt bolt) throws IllegalArgumentException {
- this(bolt, null, (Collection<String>) null);
- }
-
- /**
- * Instantiates a new {@link BoltWrapper} that wraps the given Storm {@link IRichBolt bolt} such that it can be used
-	 * within a Flink streaming program. The given input schema enables attribute-by-name access for input types
- * {@link Tuple0} to {@link Tuple25}. The output type will be one of {@link Tuple0} to {@link Tuple25} depending on
- * the bolt's declared number of attributes.
- *
- * @param bolt
- * The Storm {@link IRichBolt bolt} to be used.
- * @param inputSchema
-	 *            The schema (ie, ordered field names) of the input stream.
- * @throws IllegalArgumentException
-	 *             If the number of declared output attributes is not within range [0;25].
- */
- public BoltWrapper(final IRichBolt bolt, final Fields inputSchema)
- throws IllegalArgumentException {
- this(bolt, inputSchema, (Collection<String>) null);
- }
-
- /**
- * Instantiates a new {@link BoltWrapper} that wraps the given Storm {@link IRichBolt bolt} such that it can be used
-	 * within a Flink streaming program. As no input schema is defined, attribute-by-name access is only possible for
- * POJO input types. The output type can be any type if parameter {@code rawOutput} is {@code true} and the bolt's
- * number of declared output tuples is 1. If {@code rawOutput} is {@code false} the output type will be one of
- * {@link Tuple0} to {@link Tuple25} depending on the bolt's declared number of attributes.
- *
- * @param bolt
- * The Storm {@link IRichBolt bolt} to be used.
- * @param rawOutputs
-	 *            Contains the names of all output streams whose single-attribute output should not be of type
-	 *            {@link Tuple1} but of a raw type.
- * @throws IllegalArgumentException
- * If {@code rawOutput} is {@code true} and the number of declared output attributes is not 1 or if
- * {@code rawOutput} is {@code false} and the number of declared output attributes is not within range
- * [1;25].
- */
- public BoltWrapper(final IRichBolt bolt, final String[] rawOutputs)
- throws IllegalArgumentException {
- this(bolt, null, asList(rawOutputs));
- }
-
- /**
- * Instantiates a new {@link BoltWrapper} that wraps the given Storm {@link IRichBolt bolt} such that it can be used
-	 * within a Flink streaming program. As no input schema is defined, attribute-by-name access is only possible for
- * POJO input types. The output type can be any type if parameter {@code rawOutput} is {@code true} and the bolt's
- * number of declared output tuples is 1. If {@code rawOutput} is {@code false} the output type will be one of
- * {@link Tuple0} to {@link Tuple25} depending on the bolt's declared number of attributes.
- *
- * @param bolt
- * The Storm {@link IRichBolt bolt} to be used.
- * @param rawOutputs
-	 *            Contains the names of all output streams whose single-attribute output should not be of type
-	 *            {@link Tuple1} but of a raw type.
- * @throws IllegalArgumentException
- * If {@code rawOutput} is {@code true} and the number of declared output attributes is not 1 or if
-	 *             {@code rawOutput} is {@code false} and the number of declared output attributes is not within range
- * [1;25].
- */
- public BoltWrapper(final IRichBolt bolt, final Collection<String> rawOutputs) throws IllegalArgumentException {
- this(bolt, null, rawOutputs);
- }
-
- /**
- * Instantiates a new {@link BoltWrapper} that wraps the given Storm {@link IRichBolt bolt} such that it can be used
-	 * within a Flink streaming program. The given input schema enables attribute-by-name access for input types
- * {@link Tuple0} to {@link Tuple25}. The output type can be any type if parameter {@code rawOutput} is {@code true}
- * and the bolt's number of declared output tuples is 1. If {@code rawOutput} is {@code false} the output type will
- * be one of {@link Tuple0} to {@link Tuple25} depending on the bolt's declared number of attributes.
- *
- * @param bolt
- * The Storm {@link IRichBolt bolt} to be used.
- * @param inputSchema
- * The schema (ie, ordered field names) of the input stream.
- * @param rawOutputs
-	 *            Contains the names of all output streams whose single-attribute output should not be of type
-	 *            {@link Tuple1} but of a raw type.
- * @throws IllegalArgumentException
- * If {@code rawOutput} is {@code true} and the number of declared output attributes is not 1 or if
-	 *             {@code rawOutput} is {@code false} and the number of declared output attributes is not within range
- * [0;25].
- */
- public BoltWrapper(
- final IRichBolt bolt,
- final Fields inputSchema,
- final String[] rawOutputs)
- throws IllegalArgumentException {
- this(bolt, inputSchema, asList(rawOutputs));
- }
-
- /**
- * Instantiates a new {@link BoltWrapper} that wraps the given Storm {@link IRichBolt bolt} such that it can be used
-	 * within a Flink streaming program. The given input schema enables attribute-by-name access for input types
- * {@link Tuple0} to {@link Tuple25}. The output type can be any type if parameter {@code rawOutput} is {@code true}
- * and the bolt's number of declared output tuples is 1. If {@code rawOutput} is {@code false} the output type will
- * be one of {@link Tuple0} to {@link Tuple25} depending on the bolt's declared number of attributes.
- *
- * @param bolt
- * The Storm {@link IRichBolt bolt} to be used.
- * @param inputSchema
-	 *            The schema (ie, ordered field names) of the input stream.
- * @param rawOutputs
-	 *            Contains the names of all output streams whose single-attribute output should not be of type
-	 *            {@link Tuple1} but of a raw type.
- * @throws IllegalArgumentException
- * If {@code rawOutput} is {@code true} and the number of declared output attributes is not 1 or if
-	 *             {@code rawOutput} is {@code false} and the number of declared output attributes is not within range
- * [0;25].
- */
- public BoltWrapper(final IRichBolt bolt, final Fields inputSchema,
- final Collection<String> rawOutputs) throws IllegalArgumentException {
- this(bolt, DEFUALT_BOLT_NAME, Utils.DEFAULT_STREAM_ID, DEFAULT_ID, inputSchema, rawOutputs);
- }
-
- /**
- * Instantiates a new {@link BoltWrapper} that wraps the given Storm {@link IRichBolt bolt} such that it can be used
-	 * within a Flink streaming program. The given input schema enables attribute-by-name access for input types
- * {@link Tuple0} to {@link Tuple25}. The output type can be any type if parameter {@code rawOutput} is {@code true}
- * and the bolt's number of declared output tuples is 1. If {@code rawOutput} is {@code false} the output type will
- * be one of {@link Tuple0} to {@link Tuple25} depending on the bolt's declared number of attributes.
- *
- * @param bolt
- * The Storm {@link IRichBolt bolt} to be used.
-	 * @param name
-	 *            The name of the bolt.
-	 * @param inputStreamId
-	 *            The ID of the input stream consumed by the bolt.
-	 * @param inputComponentId
-	 *            The ID of the upstream component that produces the input stream.
-	 * @param inputSchema
-	 *            The schema (ie, ordered field names) of the input stream.
-	 * @param rawOutputs
-	 *            Contains the names of all output streams whose single-attribute output should not be of type
-	 *            {@link Tuple1} but of a raw type.
- * @throws IllegalArgumentException
- * If {@code rawOutput} is {@code true} and the number of declared output attributes is not 1 or if
-	 *             {@code rawOutput} is {@code false} and the number of declared output attributes is not within range
- * [0;25].
- */
- public BoltWrapper(final IRichBolt bolt, final String name, final String inputStreamId,
- final String inputComponentId, final Fields inputSchema,
- final Collection<String> rawOutputs) throws IllegalArgumentException {
- this.bolt = bolt;
- this.name = name;
- this.inputSchemas.put(null, inputSchema);
- this.numberOfAttributes = WrapperSetupHelper.getNumberOfAttributes(bolt, rawOutputs);
- }
-
- @Override
- public void open() throws Exception {
- super.open();
-
- this.flinkCollector = new TimestampedCollector<>(this.output);
-
- GlobalJobParameters config = getExecutionConfig().getGlobalJobParameters();
- StormConfig stormConfig = new StormConfig();
-
- if (config != null) {
- if (config instanceof StormConfig) {
- stormConfig = (StormConfig) config;
- } else {
- stormConfig.putAll(config.toMap());
- }
- }
-
- this.topologyContext = WrapperSetupHelper.createTopologyContext(
- getRuntimeContext(), this.bolt, this.name, stormConfig);
-
- final OutputCollector stormCollector = new OutputCollector(new BoltCollector<OUT>(
- this.numberOfAttributes, this.topologyContext.getThisTaskId(), this.flinkCollector));
-
- this.bolt.prepare(stormConfig, this.topologyContext, stormCollector);
- }
-
- @Override
- public void dispose() throws Exception {
- super.dispose();
- this.bolt.cleanup();
- }
-
- @Override
- public void processElement(final StreamRecord<IN> element) throws Exception {
- this.flinkCollector.setTimestamp(element);
-
- IN value = element.getValue();
-
- this.bolt.execute(new StormTuple<>(value, this.inputSchemas.get(null), -1, null, null,
- MessageId.makeUnanchored()));
- }
-}
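
For reference, a minimal sketch of embedding a Storm bolt in a Flink job with this wrapper; SentenceSpout and TokenizerBolt are hypothetical Storm components, and the output TypeInformation must match the bolt's declared fields:

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    DataStream<String> text = env.addSource(
            new SpoutWrapper<String>(new SentenceSpout(), new String[] { Utils.DEFAULT_STREAM_ID }),
            TypeExtractor.getForClass(String.class));      // the spout's single attribute, emitted raw

    DataStream<Tuple2<String, Integer>> tokens = text.transform(
            "tokenizer",                                   // operator name
            TypeExtractor.getForObject(new Tuple2<String, Integer>("", 0)),
            new BoltWrapper<String, Tuple2<String, Integer>>(new TokenizerBolt(), new Fields("sentence")));
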
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/FlinkTopologyContext.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/FlinkTopologyContext.java
deleted file mode 100644
index e84abcc..0000000
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/FlinkTopologyContext.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wrappers;
-
-import clojure.lang.Atom;
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.hooks.ITaskHook;
-import org.apache.storm.metric.api.CombinedMetric;
-import org.apache.storm.metric.api.ICombiner;
-import org.apache.storm.metric.api.IMetric;
-import org.apache.storm.metric.api.IReducer;
-import org.apache.storm.metric.api.ReducedMetric;
-import org.apache.storm.state.ISubscribedState;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.tuple.Fields;
-
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-
-/**
- * {@link FlinkTopologyContext} is a {@link TopologyContext} that overrides certain methods that are not applicable when
- * a Storm topology is executed within Flink.
- */
-final class FlinkTopologyContext extends TopologyContext {
-
- /**
- * Instantiates a new {@link FlinkTopologyContext} for a given Storm topology. The context object is instantiated
- * for each parallel task
-	 * for each parallel task.
- FlinkTopologyContext(final StormTopology topology,
- @SuppressWarnings("rawtypes") final Map stormConf,
- final Map<Integer, String> taskToComponent, final Map<String, List<Integer>> componentToSortedTasks,
- final Map<String, Map<String, Fields>> componentToStreamToFields, final String stormId, final String codeDir,
- final String pidDir, final Integer taskId, final Integer workerPort, final List<Integer> workerTasks,
- final Map<String, Object> defaultResources, final Map<String, Object> userResources,
- final Map<String, Object> executorData, @SuppressWarnings("rawtypes") final Map registeredMetrics,
- final Atom openOrPrepareWasCalled) {
- super(topology, stormConf, taskToComponent, componentToSortedTasks, componentToStreamToFields, stormId,
- codeDir, pidDir, taskId, workerPort, workerTasks, defaultResources, userResources, executorData,
- registeredMetrics, openOrPrepareWasCalled);
- }
-
- /**
- * Not supported by Flink.
- *
- * @throws UnsupportedOperationException
- * at every invocation
- */
- @Override
- public void addTaskHook(final ITaskHook hook) {
- throw new UnsupportedOperationException("Task hooks are not supported by Flink");
- }
-
- /**
- * Not supported by Flink.
- *
- * @throws UnsupportedOperationException
- * at every invocation
- */
- @Override
- public Collection<ITaskHook> getHooks() {
- throw new UnsupportedOperationException("Task hooks are not supported by Flink");
- }
-
- /**
- * Not supported by Flink.
- *
- * @throws UnsupportedOperationException
- * at every invocation
- */
- @Override
- public IMetric getRegisteredMetricByName(final String name) {
- throw new UnsupportedOperationException("Metrics are not supported by Flink");
-
- }
-
- /**
- * Not supported by Flink.
- *
- * @throws UnsupportedOperationException
- * at every invocation
- */
- @SuppressWarnings("rawtypes")
- @Override
- public CombinedMetric registerMetric(final String name, final ICombiner combiner, final int timeBucketSizeInSecs) {
- throw new UnsupportedOperationException("Metrics are not supported by Flink");
- }
-
- /**
- * Not supported by Flink.
- *
- * @throws UnsupportedOperationException
- * at every invocation
- */
- @SuppressWarnings("rawtypes")
- @Override
- public ReducedMetric registerMetric(final String name, final IReducer combiner, final int timeBucketSizeInSecs) {
- throw new UnsupportedOperationException("Metrics are not supported by Flink");
- }
-
- /**
- * Not supported by Flink.
- *
- * @throws UnsupportedOperationException
- * at every invocation
- */
- @Override
- public <T extends IMetric> T registerMetric(final String name, final T metric, final int timeBucketSizeInSecs) {
- throw new UnsupportedOperationException("Metrics are not supported by Flink");
- }
-
- /**
- * Not supported by Flink.
- *
- * @throws UnsupportedOperationException
- * at every invocation
- */
- @Override
- public <T extends ISubscribedState> T setAllSubscribedState(final T obj) {
- throw new UnsupportedOperationException("Not supported by Flink");
-
- }
-
- /**
- * Not supported by Flink.
- *
- * @throws UnsupportedOperationException
- * at every invocation
- */
- @Override
- public <T extends ISubscribedState> T setSubscribedState(final String componentId, final T obj) {
- throw new UnsupportedOperationException("Not supported by Flink");
- }
-
- /**
- * Not supported by Flink.
- *
- * @throws UnsupportedOperationException
- * at every invocation
- */
- @Override
- public <T extends ISubscribedState> T setSubscribedState(final String componentId, final String streamId, final T
- obj) {
- throw new UnsupportedOperationException("Not supported by Flink");
- }
-
-}
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/MergedInputsBoltWrapper.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/MergedInputsBoltWrapper.java
deleted file mode 100644
index 88ae355..0000000
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/MergedInputsBoltWrapper.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wrappers;
-
-import org.apache.flink.api.java.tuple.Tuple0;
-import org.apache.flink.api.java.tuple.Tuple1;
-import org.apache.flink.api.java.tuple.Tuple25;
-import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
-
-import org.apache.storm.topology.IRichBolt;
-
-import java.util.Collection;
-
-import static java.util.Arrays.asList;
-
-/**
- * A {@link MergedInputsBoltWrapper} is a {@link BoltWrapper} that expects input tuples of type {@link StormTuple}. It
- * can be used to wrap a multi-input bolt and assumes that all input streams have already been merged into a single
- * {@link StormTuple} stream.
- */
-public final class MergedInputsBoltWrapper<IN, OUT> extends BoltWrapper<StormTuple<IN>, OUT> {
- private static final long serialVersionUID = 6399319187892878545L;
-
- /**
- * Instantiates a new {@link MergedInputsBoltWrapper} that wraps the given Storm {@link IRichBolt bolt} such that it
- * can be used within a Flink streaming program. The output type will be one of {@link Tuple0} to {@link Tuple25}
- * depending on the bolt's declared number of attributes.
- *
- * @param bolt
- * The Storm {@link IRichBolt bolt} to be used.
- * @throws IllegalArgumentException
-	 *             If the number of declared output attributes is not within range [0;25].
- */
- public MergedInputsBoltWrapper(final IRichBolt bolt) throws IllegalArgumentException {
- super(bolt);
- }
-
- /**
- * Instantiates a new {@link MergedInputsBoltWrapper} that wraps the given Storm {@link IRichBolt bolt} such that it
- * can be used within a Flink streaming program. The output type can be any type if parameter {@code rawOutput} is
- * {@code true} and the bolt's number of declared output tuples is 1. If {@code rawOutput} is {@code false} the
- * output type will be one of {@link Tuple0} to {@link Tuple25} depending on the bolt's declared number of
- * attributes.
- *
- * @param bolt
- * The Storm {@link IRichBolt bolt} to be used.
- * @param rawOutputs
-	 *            Contains the names of all output streams whose single-attribute output should not be of type
-	 *            {@link Tuple1} but of a raw type.
- * @throws IllegalArgumentException
- * If {@code rawOutput} is {@code true} and the number of declared output attributes is not 1 or if
- * {@code rawOutput} is {@code false} and the number of declared output attributes is not within range
- * [1;25].
- */
- public MergedInputsBoltWrapper(final IRichBolt bolt, final String[] rawOutputs)
- throws IllegalArgumentException {
- super(bolt, asList(rawOutputs));
- }
-
- /**
- * Instantiates a new {@link MergedInputsBoltWrapper} that wraps the given Storm {@link IRichBolt bolt} such that it
- * can be used within a Flink streaming program. The output type can be any type if parameter {@code rawOutput} is
- * {@code true} and the bolt's number of declared output tuples is 1. If {@code rawOutput} is {@code false} the
- * output type will be one of {@link Tuple0} to {@link Tuple25} depending on the bolt's declared number of
- * attributes.
- *
- * @param bolt
- * The Storm {@link IRichBolt bolt} to be used.
- * @param rawOutputs
-	 *            Contains the names of all output streams whose single-attribute output should not be of type
-	 *            {@link Tuple1} but of a raw type.
- * @throws IllegalArgumentException
- * If {@code rawOutput} is {@code true} and the number of declared output attributes is not 1 or if
-	 *             {@code rawOutput} is {@code false} and the number of declared output attributes is not within range
- * [1;25].
- */
- public MergedInputsBoltWrapper(final IRichBolt bolt, final Collection<String> rawOutputs)
- throws IllegalArgumentException {
- super(bolt, rawOutputs);
- }
-
- /**
- * Instantiates a new {@link MergedInputsBoltWrapper} that wraps the given Storm {@link IRichBolt bolt} such that it
- * can be used within a Flink streaming program. The output type can be any type if parameter {@code rawOutput} is
- * {@code true} and the bolt's number of declared output tuples is 1. If {@code rawOutput} is {@code false} the
- * output type will be one of {@link Tuple0} to {@link Tuple25} depending on the bolt's declared number of
- * attributes.
- *
- * @param bolt
- * The Storm {@link IRichBolt bolt} to be used.
- * @param name
- * The name of the bolt.
- * @param rawOutputs
-	 *            Contains the names of all output streams whose single-attribute output should not be of type
-	 *            {@link Tuple1} but of a raw type.
- * @throws IllegalArgumentException
- * If {@code rawOutput} is {@code true} and the number of declared output attributes is not 1 or if
-	 *             {@code rawOutput} is {@code false} and the number of declared output attributes is not within range
- * [0;25].
- */
- public MergedInputsBoltWrapper(final IRichBolt bolt, final String name, final Collection<String> rawOutputs)
- throws IllegalArgumentException {
- super(bolt, name, null, null, null, rawOutputs);
- }
-
- @Override
- public void processElement(final StreamRecord<StormTuple<IN>> element) throws Exception {
- this.flinkCollector.setTimestamp(element);
- this.bolt.execute(element.getValue());
- }
-
-}
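
A minimal sketch of the merged-inputs pattern; `toStormTuple` stands for a hypothetical MapFunction that wraps each record into a StormTuple the way BoltWrapper#processElement does, and `joinBolt`/`outType` are likewise placeholders:

    DataStream<StormTuple<Object>> merged = input1.map(toStormTuple)
            .union(input2.map(toStormTuple));   // all inputs as one StormTuple stream

    DataStream<Tuple2<String, Integer>> joined = merged.transform(
            "join-bolt", outType,
            new MergedInputsBoltWrapper<Object, Tuple2<String, Integer>>(joinBolt));
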
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/SetupOutputFieldsDeclarer.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/SetupOutputFieldsDeclarer.java
deleted file mode 100644
index d927f0e..0000000
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/SetupOutputFieldsDeclarer.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wrappers;
-
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.utils.Utils;
-
-import java.util.HashMap;
-
-/**
- * {@link SetupOutputFieldsDeclarer} is used by {@link WrapperSetupHelper} to determine the output streams and
- * number of attributes declared by the wrapped spout's or bolt's {@code declare(...)}/{@code declareStream(...)}
- * method.
- */
-class SetupOutputFieldsDeclarer implements OutputFieldsDeclarer {
-
- /** The declared output streams and schemas. */
- HashMap<String, Fields> outputStreams = new HashMap<String, Fields>();
- /** The number of attributes for each declared stream by the wrapped operator. */
- HashMap<String, Integer> outputSchemas = new HashMap<String, Integer>();
-
- @Override
- public void declare(final Fields fields) {
- this.declareStream(Utils.DEFAULT_STREAM_ID, false, fields);
- }
-
- @Override
- public void declare(final boolean direct, final Fields fields) {
- this.declareStream(Utils.DEFAULT_STREAM_ID, direct, fields);
- }
-
- @Override
- public void declareStream(final String streamId, final Fields fields) {
- this.declareStream(streamId, false, fields);
- }
-
- @Override
- public void declareStream(final String streamId, final boolean direct, final Fields fields) {
- if (streamId == null) {
- throw new IllegalArgumentException("Stream ID cannot be null.");
- }
- if (direct) {
- throw new UnsupportedOperationException("Direct emit is not supported by Flink");
- }
-
- this.outputStreams.put(streamId, fields);
- this.outputSchemas.put(streamId, fields.size());
- }
-
-}
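
A minimal sketch of how WrapperSetupHelper used this declarer to discover a component's declared streams; `bolt` is any IRichBolt instance:

    SetupOutputFieldsDeclarer declarer = new SetupOutputFieldsDeclarer();
    bolt.declareOutputFields(declarer);   // the bolt reports its streams and fields
    Integer numAtt = declarer.outputSchemas.get(Utils.DEFAULT_STREAM_ID); // e.g. 2 for ("word", "count")
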
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/SpoutCollector.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/SpoutCollector.java
deleted file mode 100644
index 6e3a39a..0000000
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/SpoutCollector.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wrappers;
-
-import org.apache.flink.api.java.tuple.Tuple0;
-import org.apache.flink.api.java.tuple.Tuple25;
-import org.apache.flink.streaming.api.functions.source.SourceFunction.SourceContext;
-
-import org.apache.storm.spout.ISpoutOutputCollector;
-
-import java.util.HashMap;
-import java.util.List;
-
-/**
- * A {@link SpoutCollector} is used by {@link SpoutWrapper} to provide a Storm-compatible
- * output collector to the wrapped spout. It transforms the emitted Storm tuples into
- * Flink tuples and emits them via the provided {@link SourceContext} object.
- */
-class SpoutCollector<OUT> extends AbstractStormCollector<OUT> implements ISpoutOutputCollector {
-
- /** The Flink source context object. */
- private final SourceContext<OUT> flinkContext;
-
- /**
- * Instantiates a new {@link SpoutCollector} that emits Flink tuples to the given Flink source context. If the
-	 * number of attributes is negative, any output type is supported (ie, raw type). If the number of attributes is
-	 * between 0 and 25, the output type is {@link Tuple0} to {@link Tuple25}, respectively.
- *
- * @param numberOfAttributes
- * The number of attributes of the emitted tuples.
- * @param taskId
- * The ID of the producer task (negative value for unknown).
- * @param flinkContext
- * The Flink source context to be used.
- * @throws UnsupportedOperationException
- * if the specified number of attributes is greater than 25
- */
- SpoutCollector(final HashMap<String, Integer> numberOfAttributes, final int taskId,
- final SourceContext<OUT> flinkContext) throws UnsupportedOperationException {
- super(numberOfAttributes, taskId);
- assert (flinkContext != null);
- this.flinkContext = flinkContext;
- }
-
- @Override
- protected List<Integer> doEmit(final OUT flinkTuple) {
- this.flinkContext.collect(flinkTuple);
- // TODO
- return null;
- }
-
- @Override
- public void reportError(final Throwable error) {
-		// not sure if Flink can support this
- }
-
- @Override
- public List<Integer> emit(final String streamId, final List<Object> tuple, final Object messageId) {
- return this.tansformAndEmit(streamId, tuple);
- }
-
- @Override
- public void emitDirect(final int taskId, final String streamId, final List<Object> tuple, final Object messageId) {
- throw new UnsupportedOperationException("Direct emit is not supported by Flink");
- }
-
- public long getPendingCount() {
- return 0;
- }
-
-}
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/SpoutWrapper.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/SpoutWrapper.java
deleted file mode 100644
index eb10384..0000000
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/SpoutWrapper.java
+++ /dev/null
@@ -1,305 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wrappers;
-
-import org.apache.flink.api.common.ExecutionConfig.GlobalJobParameters;
-import org.apache.flink.api.common.functions.StoppableFunction;
-import org.apache.flink.api.java.tuple.Tuple0;
-import org.apache.flink.api.java.tuple.Tuple1;
-import org.apache.flink.api.java.tuple.Tuple25;
-import org.apache.flink.storm.util.FiniteSpout;
-import org.apache.flink.storm.util.StormConfig;
-import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
-import org.apache.flink.streaming.api.operators.StreamingRuntimeContext;
-
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichSpout;
-
-import java.util.Collection;
-import java.util.HashMap;
-
-import static java.util.Arrays.asList;
-
-/**
- * A {@link SpoutWrapper} wraps an {@link IRichSpout} in order to execute it within a Flink Streaming program. It
- * takes the spout's output tuples and transforms them into Flink tuples of type {@code OUT} (see
- * {@link SpoutCollector} for supported types).<br>
- * <br>
- * By default, {@link SpoutWrapper} calls the wrapped spout's {@link IRichSpout#nextTuple() nextTuple()} method in
- * an infinite loop.<br>
- * Alternatively, {@link SpoutWrapper} can call {@link IRichSpout#nextTuple() nextTuple()} a finite number of
- * times and terminate automatically afterwards (for finite input streams). The number of {@code nextTuple()} calls can
- * be specified as a certain number of invocations or can be undefined. In the undefined case, {@link SpoutWrapper}
- * terminates the first time a call to {@link IRichSpout#nextTuple() nextTuple()} emits no record to the output
- * collector.<br>
- * If the given spout implements the {@link FiniteSpout} interface and {@link #numberOfInvocations} is not provided or
- * is {@code null}, {@link SpoutWrapper} calls {@link IRichSpout#nextTuple() nextTuple()} method until
- * {@link FiniteSpout#reachedEnd()} returns true.
- */
-public final class SpoutWrapper<OUT> extends RichParallelSourceFunction<OUT> implements StoppableFunction {
- private static final long serialVersionUID = -218340336648247605L;
-
-	/** Number of attributes of the spout's output tuples per stream. */
- private final HashMap<String, Integer> numberOfAttributes;
- /** The wrapped {@link IRichSpout spout}. */
- private final IRichSpout spout;
- /** The name of the spout. */
- private final String name;
-	/** Indicates whether the source is still running or was canceled. */
- private volatile boolean isRunning = true;
- /** The number of {@link IRichSpout#nextTuple()} calls. */
- private Integer numberOfInvocations; // do not use int -> null indicates an infinite loop
-
- /**
- * Instantiates a new {@link SpoutWrapper} that calls the {@link IRichSpout#nextTuple() nextTuple()} method of
- * the given {@link IRichSpout spout} in an infinite loop. The output type will be one of {@link Tuple0} to
- * {@link Tuple25} depending on the spout's declared number of attributes.
- *
- * @param spout
- * The {@link IRichSpout spout} to be used.
- * @throws IllegalArgumentException
-	 *             If the number of declared output attributes is not within range [0;25].
- */
- public SpoutWrapper(final IRichSpout spout) throws IllegalArgumentException {
- this(spout, (Collection<String>) null, null);
- }
-
- /**
- * Instantiates a new {@link SpoutWrapper} that calls the {@link IRichSpout#nextTuple() nextTuple()} method of
- * the given {@link IRichSpout spout} a finite number of times. The output type will be one of {@link Tuple0} to
- * {@link Tuple25} depending on the spout's declared number of attributes.
- *
- * @param spout
- * The {@link IRichSpout spout} to be used.
- * @param numberOfInvocations
-	 *            The number of calls to {@link IRichSpout#nextTuple()}. If the value is negative, {@link SpoutWrapper}
-	 *            terminates the first time no tuple is emitted. If the value is {@code null}, finite invocation is
- * disabled.
- * @throws IllegalArgumentException
-	 *             If the number of declared output attributes is not within range [0;25].
- */
- public SpoutWrapper(final IRichSpout spout, final Integer numberOfInvocations)
- throws IllegalArgumentException {
- this(spout, (Collection<String>) null, numberOfInvocations);
- }
-
- /**
- * Instantiates a new {@link SpoutWrapper} that calls the {@link IRichSpout#nextTuple() nextTuple()} method of
- * the given {@link IRichSpout spout} in an infinite loop. The output type can be any type if parameter
- * {@code rawOutput} is {@code true} and the spout's number of declared output tuples is 1. If {@code rawOutput} is
- * {@code false} the output type will be one of {@link Tuple0} to {@link Tuple25} depending on the spout's declared
- * number of attributes.
- *
- * @param spout
- * The {@link IRichSpout spout} to be used.
- * @param rawOutputs
-	 *            Contains the names of all output streams whose single-attribute output should not be of type
-	 *            {@link Tuple1} but of a raw type. (Can be {@code null}.)
- * @throws IllegalArgumentException
- * If {@code rawOutput} is {@code true} and the number of declared output attributes is not 1 or if
-	 *             {@code rawOutput} is {@code false} and the number of declared output attributes is not within range
- * [0;25].
- */
- public SpoutWrapper(final IRichSpout spout, final String[] rawOutputs)
- throws IllegalArgumentException {
- this(spout, asList(rawOutputs), null);
- }
-
- /**
- * Instantiates a new {@link SpoutWrapper} that calls the {@link IRichSpout#nextTuple() nextTuple()} method of
- * the given {@link IRichSpout spout} a finite number of times. The output type can be any type if parameter
- * {@code rawOutput} is {@code true} and the spout's number of declared output tuples is 1. If {@code rawOutput} is
- * {@code false} the output type will be one of {@link Tuple0} to {@link Tuple25} depending on the spout's declared
- * number of attributes.
- *
- * @param spout
- * The {@link IRichSpout spout} to be used.
- * @param rawOutputs
-	 *            Contains the names of all output streams whose single-attribute output should not be of type
-	 *            {@link Tuple1} but of a raw type. (Can be {@code null}.)
- * @param numberOfInvocations
-	 *            The number of calls to {@link IRichSpout#nextTuple()}. If the value is negative, {@link SpoutWrapper}
-	 *            terminates the first time no tuple is emitted. If the value is {@code null}, finite invocation is
- * disabled.
- * @throws IllegalArgumentException
- * If {@code rawOutput} is {@code true} and the number of declared output attributes is not 1 or if
-	 *             {@code rawOutput} is {@code false} and the number of declared output attributes is not within range
- * [0;25].
- */
- public SpoutWrapper(final IRichSpout spout, final String[] rawOutputs,
- final Integer numberOfInvocations) throws IllegalArgumentException {
- this(spout, asList(rawOutputs), numberOfInvocations);
- }
-
- /**
- * Instantiates a new {@link SpoutWrapper} that calls the {@link IRichSpout#nextTuple() nextTuple()} method of
- * the given {@link IRichSpout spout} in an infinite loop. The output type can be any type if parameter
- * {@code rawOutput} is {@code true} and the spout's number of declared output tuples is 1. If {@code rawOutput} is
- * {@code false} the output type will be one of {@link Tuple0} to {@link Tuple25} depending on the spout's declared
- * number of attributes.
- *
- * @param spout
- * The {@link IRichSpout spout} to be used.
- * @param rawOutputs
-	 *            Contains the names of all output streams whose single-attribute output should not be of type
-	 *            {@link Tuple1} but of a raw type. (Can be {@code null}.)
- * @throws IllegalArgumentException
- * If {@code rawOutput} is {@code true} and the number of declared output attributes is not 1 or if
- * {@code rawOutput} is {@code false} and the number of declared output attributes is not with range
- * [0;25].
- */
- public SpoutWrapper(final IRichSpout spout, final Collection<String> rawOutputs)
- throws IllegalArgumentException {
- this(spout, rawOutputs, null);
- }
-
- /**
- * Instantiates a new {@link SpoutWrapper} that calls the {@link IRichSpout#nextTuple() nextTuple()} method of
- * the given {@link IRichSpout spout} a finite number of times. The output type can be any type if the
- * corresponding stream is listed in {@code rawOutputs} and the spout declares a single output attribute for
- * it. Otherwise, the output type will be one of {@link Tuple0} to {@link Tuple25} depending on the spout's
- * declared number of attributes.
- *
- * @param spout
- * The {@link IRichSpout spout} to be used.
- * @param rawOutputs
- * Contains the names of all output streams that carry a single attribute and should be emitted as a
- * raw type rather than as {@link Tuple1}. (Can be {@code null}.)
- * @param numberOfInvocations
- * The number of calls to {@link IRichSpout#nextTuple()}. If the value is negative, the
- * {@link SpoutWrapper} terminates the first time no tuple is emitted. If the value is {@code null},
- * finite invocation is disabled.
- * @throws IllegalArgumentException
- * If a stream is listed in {@code rawOutputs} but does not declare exactly one output attribute, or
- * if a stream not listed in {@code rawOutputs} declares a number of output attributes that is not
- * within range [0;25].
- */
- public SpoutWrapper(final IRichSpout spout, final Collection<String> rawOutputs,
- final Integer numberOfInvocations) throws IllegalArgumentException {
- this(spout, null, rawOutputs, numberOfInvocations);
- }
-
- /**
- * Instantiates a new {@link SpoutWrapper} that calls the {@link IRichSpout#nextTuple() nextTuple()} method of
- * the given {@link IRichSpout spout} a finite number of times. The output type can be any type if the
- * corresponding stream is listed in {@code rawOutputs} and the spout declares a single output attribute for
- * it. Otherwise, the output type will be one of {@link Tuple0} to {@link Tuple25} depending on the spout's
- * declared number of attributes.
- *
- * @param spout
- * The {@link IRichSpout spout} to be used.
- * @param name
- * The name of the spout.
- * @param rawOutputs
- * Contains the names of all output streams that carry a single attribute and should be emitted as a
- * raw type rather than as {@link Tuple1}. (Can be {@code null}.)
- * @param numberOfInvocations
- * The number of calls to {@link IRichSpout#nextTuple()}. If the value is negative, the
- * {@link SpoutWrapper} terminates the first time no tuple is emitted. If the value is {@code null},
- * finite invocation is disabled.
- * @throws IllegalArgumentException
- * If a stream is listed in {@code rawOutputs} but does not declare exactly one output attribute, or
- * if a stream not listed in {@code rawOutputs} declares a number of output attributes that is not
- * within range [0;25].
- */
- public SpoutWrapper(final IRichSpout spout, final String name, final Collection<String> rawOutputs,
- final Integer numberOfInvocations) throws IllegalArgumentException {
- this.spout = spout;
- this.name = name;
- this.numberOfAttributes = WrapperSetupHelper.getNumberOfAttributes(spout, rawOutputs);
- this.numberOfInvocations = numberOfInvocations;
- }
-
- @Override
- public final void run(final SourceContext<OUT> ctx) throws Exception {
- final GlobalJobParameters config = super.getRuntimeContext().getExecutionConfig()
- .getGlobalJobParameters();
- StormConfig stormConfig = new StormConfig();
-
- if (config != null) {
- if (config instanceof StormConfig) {
- stormConfig = (StormConfig) config;
- } else {
- stormConfig.putAll(config.toMap());
- }
- }
-
- final TopologyContext stormTopologyContext = WrapperSetupHelper.createTopologyContext(
- (StreamingRuntimeContext) super.getRuntimeContext(), this.spout, this.name,
- stormConfig);
-
- SpoutCollector<OUT> collector = new SpoutCollector<OUT>(this.numberOfAttributes,
- stormTopologyContext.getThisTaskId(), ctx);
-
- this.spout.open(stormConfig, stormTopologyContext, new SpoutOutputCollector(collector));
- this.spout.activate();
-
- if (numberOfInvocations == null) {
- if (this.spout instanceof FiniteSpout) {
- final FiniteSpout finiteSpout = (FiniteSpout) this.spout;
-
- while (this.isRunning && !finiteSpout.reachedEnd()) {
- finiteSpout.nextTuple();
- }
- } else {
- while (this.isRunning) {
- this.spout.nextTuple();
- }
- }
- } else {
- int counter = this.numberOfInvocations;
- if (counter >= 0) {
- while ((--counter >= 0) && this.isRunning) {
- this.spout.nextTuple();
- }
- } else {
- do {
- collector.tupleEmitted = false;
- this.spout.nextTuple();
- } while (collector.tupleEmitted && this.isRunning);
- }
- }
- }
-
- /**
- * {@inheritDoc}
- *
- * <p>Sets the {@link #isRunning} flag to {@code false}.
- */
- @Override
- public void cancel() {
- this.isRunning = false;
- }
-
- /**
- * {@inheritDoc}
- *
- * <p>Sets the {@link #isRunning} flag to {@code false}.
- */
- @Override
- public void stop() {
- this.isRunning = false;
- }
-
- @Override
- public void close() throws Exception {
- this.spout.close();
- }
-
-}
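
For context on what this commit drops: SpoutWrapper above was the bridge that ran a Storm
IRichSpout as a Flink source, with the finite/infinite invocation modes documented in its
constructors. A minimal usage sketch against the removed API follows; SentenceSpout is a
hypothetical spout declaring a single output attribute on the default stream, everything
else mirrors the constructors and run() loop shown above.

    // Sketch only -- assumes the pre-removal flink-storm API and a hypothetical SentenceSpout.
    import org.apache.flink.storm.wrappers.SpoutWrapper;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
    import org.apache.storm.utils.Utils;
    import java.util.Collections;

    public class SpoutWrapperSketch {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            env.addSource(new SpoutWrapper<String>(
                        new SentenceSpout(),                                // hypothetical IRichSpout
                        Collections.singletonList(Utils.DEFAULT_STREAM_ID), // emit as raw String
                        1000))                                              // stop after 1000 nextTuple() calls
               .returns(String.class)
               .print();
            env.execute("storm-spout-as-source (sketch)");
        }
    }
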
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/StormTuple.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/StormTuple.java
deleted file mode 100644
index a1d33e8..0000000
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/StormTuple.java
+++ /dev/null
@@ -1,398 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wrappers;
-
-/*
- * We import neither
- * org.apache.storm.tuple.Tuple;
- * nor
- * org.apache.flink.api.java.tuple.Tuple
- * to avoid confusion
- */
-
-import org.apache.storm.generated.GlobalStreamId;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.MessageId;
-import org.apache.storm.tuple.Values;
-
-import java.lang.reflect.Field;
-import java.lang.reflect.Method;
-import java.util.List;
-
-/**
- * {@link StormTuple} converts a Flink tuple of type {@code IN} into a Storm tuple.
- */
-public class StormTuple<IN> implements org.apache.storm.tuple.Tuple {
-
- /** The Storm representation of the original Flink tuple. */
- private final Values stormTuple;
- /** The schema (i.e., ordered field names) of this tuple. */
- private final Fields schema;
- /** The task ID where this tuple was produced. */
- private final int producerTaskId;
- /** The input stream from which this tuple was received. */
- private final String producerStreamId;
- /** The producer's component ID of this tuple. */
- private final String producerComponentId;
- /** The message that is associated with this tuple. */
- private final MessageId messageId;
-
- /**
- * Create a new Storm tuple from the given Flink tuple.
- *
- * @param flinkTuple
- * The Flink tuple to be converted.
- * @param schema
- * The schema (i.e., ordered field names) of the tuple.
- * @param producerTaskId
- * The task ID of the producer (a valid, i.e., non-negative, ID implies that the last
- * attribute of {@code flinkTuple} is truncated).
- * @param producerStreamId
- * The input stream ID from which this tuple was received.
- * @param producerComponentId
- * The component ID of the producer.
- * @param messageId
- * The message ID of this tuple.
- */
- public StormTuple(final IN flinkTuple, final Fields schema, final int producerTaskId, final String producerStreamId, final String producerComponentId, final MessageId messageId) {
- if (flinkTuple instanceof org.apache.flink.api.java.tuple.Tuple) {
- final org.apache.flink.api.java.tuple.Tuple t = (org.apache.flink.api.java.tuple.Tuple) flinkTuple;
-
- final int numberOfAttributes;
- // does flinkTuple carry producerTaskId as last attribute?
- if (producerTaskId < 0) {
- numberOfAttributes = t.getArity();
- } else {
- numberOfAttributes = t.getArity() - 1;
- }
- this.stormTuple = new Values();
- for (int i = 0; i < numberOfAttributes; ++i) {
- this.stormTuple.add(t.getField(i));
- }
- } else {
- this.stormTuple = new Values(flinkTuple);
- }
-
- this.schema = schema;
- this.producerTaskId = producerTaskId;
- this.producerStreamId = producerStreamId;
- this.producerComponentId = producerComponentId;
- this.messageId = messageId;
- }
-
- @Override
- public int size() {
- return this.stormTuple.size();
- }
-
- @Override
- public boolean contains(final String field) {
- if (this.schema != null) {
- return this.schema.contains(field);
- }
-
- try {
- this.getPublicMemberField(field);
- return true;
- } catch (NoSuchFieldException f) {
- try {
- this.getGetterMethod(field);
- return true;
- } catch (Exception g) {
- return false;
- }
- } catch (Exception e) {
- return false;
- }
- }
-
- @Override
- public Fields getFields() {
- return this.schema;
- }
-
- @Override
- public int fieldIndex(final String field) {
- return this.schema.fieldIndex(field);
- }
-
- @Override
- public List<Object> select(final Fields selector) {
- return this.schema.select(selector, this.stormTuple);
- }
-
- @Override
- public Object getValue(final int i) {
- return this.stormTuple.get(i);
- }
-
- @Override
- public String getString(final int i) {
- return (String) this.stormTuple.get(i);
- }
-
- @Override
- public Integer getInteger(final int i) {
- return (Integer) this.stormTuple.get(i);
- }
-
- @Override
- public Long getLong(final int i) {
- return (Long) this.stormTuple.get(i);
- }
-
- @Override
- public Boolean getBoolean(final int i) {
- return (Boolean) this.stormTuple.get(i);
- }
-
- @Override
- public Short getShort(final int i) {
- return (Short) this.stormTuple.get(i);
- }
-
- @Override
- public Byte getByte(final int i) {
- return (Byte) this.stormTuple.get(i);
- }
-
- @Override
- public Double getDouble(final int i) {
- return (Double) this.stormTuple.get(i);
- }
-
- @Override
- public Float getFloat(final int i) {
- return (Float) this.stormTuple.get(i);
- }
-
- @Override
- public byte[] getBinary(final int i) {
- return (byte[]) this.stormTuple.get(i);
- }
-
- private Field getPublicMemberField(final String field) throws Exception {
- assert (this.stormTuple.size() == 1);
- return this.stormTuple.get(0).getClass().getField(field);
- }
-
- private Method getGetterMethod(final String field) throws Exception {
- assert (this.stormTuple.size() == 1);
- return this.stormTuple
- .get(0)
- .getClass()
- .getMethod("get" + Character.toUpperCase(field.charAt(0)) + field.substring(1),
- (Class<?>[]) null);
- }
-
- private Object getValueByPublicMember(final String field) throws Exception {
- assert (this.stormTuple.size() == 1);
- return getPublicMemberField(field).get(this.stormTuple.get(0));
- }
-
- private Object getValueByGetter(final String field) throws Exception {
- assert (this.stormTuple.size() == 1);
- return getGetterMethod(field).invoke(this.stormTuple.get(0), (Object[]) null);
- }
-
- @SuppressWarnings("unchecked")
- public <T> T getValueByName(final String field) {
- if (this.schema != null) {
- return (T) this.getValue(this.schema.fieldIndex(field));
- }
- assert (this.stormTuple.size() == 1);
-
- Exception e;
- try {
- // try public member
- return (T) getValueByPublicMember(field);
- } catch (NoSuchFieldException f) {
- try {
- // try getter-method
- return (T) getValueByGetter(field);
- } catch (Exception g) {
- e = g;
- }
- } catch (Exception f) {
- e = f;
- }
-
- throw new RuntimeException("Could not access field <" + field + ">", e);
- }
-
- @Override
- public Object getValueByField(final String field) {
- return getValueByName(field);
- }
-
- @Override
- public String getStringByField(final String field) {
- return getValueByName(field);
- }
-
- @Override
- public Integer getIntegerByField(final String field) {
- return getValueByName(field);
- }
-
- @Override
- public Long getLongByField(final String field) {
- return getValueByName(field);
- }
-
- @Override
- public Boolean getBooleanByField(final String field) {
- return getValueByName(field);
- }
-
- @Override
- public Short getShortByField(final String field) {
- return getValueByName(field);
- }
-
- @Override
- public Byte getByteByField(final String field) {
- return getValueByName(field);
- }
-
- @Override
- public Double getDoubleByField(final String field) {
- return getValueByName(field);
- }
-
- @Override
- public Float getFloatByField(final String field) {
- return getValueByName(field);
- }
-
- @Override
- public byte[] getBinaryByField(final String field) {
- return getValueByName(field);
- }
-
- @Override
- public List<Object> getValues() {
- return this.stormTuple;
- }
-
- @Override
- public GlobalStreamId getSourceGlobalStreamid() {
- return new GlobalStreamId(this.producerComponentId, this.producerStreamId);
- }
-
- @Override
- public String getSourceComponent() {
- return this.producerComponentId;
- }
-
- @Override
- public int getSourceTask() {
- return this.producerTaskId;
- }
-
- @Override
- public String getSourceStreamId() {
- return this.producerStreamId;
- }
-
- @Override
- public MessageId getMessageId() {
- return this.messageId;
- }
-
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + ((messageId == null) ? 0 : messageId.hashCode());
- result = prime * result
- + ((producerComponentId == null) ? 0 : producerComponentId.hashCode());
- result = prime * result + ((producerStreamId == null) ? 0 : producerStreamId.hashCode());
- result = prime * result + producerTaskId;
- result = prime * result + ((schema == null) ? 0 : schema.toList().hashCode());
- result = prime * result + ((stormTuple == null) ? 0 : stormTuple.hashCode());
- return result;
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public boolean equals(Object obj) {
- if (this == obj) {
- return true;
- }
- if (obj == null) {
- return false;
- }
- if (getClass() != obj.getClass()) {
- return false;
- }
- StormTuple other = (StormTuple) obj;
- if (messageId == null) {
- if (other.messageId != null) {
- return false;
- }
- } else if (!messageId.equals(other.messageId)) {
- return false;
- }
- if (producerComponentId == null) {
- if (other.producerComponentId != null) {
- return false;
- }
- } else if (!producerComponentId.equals(other.producerComponentId)) {
- return false;
- }
- if (producerStreamId == null) {
- if (other.producerStreamId != null) {
- return false;
- }
- } else if (!producerStreamId.equals(other.producerStreamId)) {
- return false;
- }
- if (producerTaskId != other.producerTaskId) {
- return false;
- }
- if (schema == null) {
- if (other.schema != null) {
- return false;
- }
- } else if (!schema.toList().equals(other.schema.toList())) {
- return false;
- }
- if (stormTuple == null) {
- if (other.stormTuple != null) {
- return false;
- }
- } else if (!stormTuple.equals(other.stormTuple)) {
- return false;
- }
- return true;
- }
-
- @Override
- public String toString() {
- return "StormTuple{ " + stormTuple.toString() + "[" + this.producerComponentId + ","
- + this.producerStreamId + "," + this.producerTaskId + "," + this.messageId + "]}";
- }
-
- @Override
- public GlobalStreamId getSourceGlobalStreamId() {
- return new GlobalStreamId(this.producerComponentId, this.producerStreamId);
- }
-
-}
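
One detail of the deleted class worth noting: when the wrapped payload is not a Flink tuple
and no schema is set, getValueByName() falls back to reflection, trying a public member
field first and then a JavaBean getter. A sketch of that lookup order, with Sensor as a
hypothetical payload class (not part of this commit):

    // Sketch only -- StormTuple's reflective field access for POJO payloads (no schema set).
    public class Sensor {
        public int id = 7;                         // found via getClass().getField("id")
        private double temp = 21.5;
        public double getTemp() { return temp; }   // found via getClass().getMethod("getTemp")
    }

    StormTuple<Sensor> t = new StormTuple<Sensor>(
            new Sensor(), null, -1, null, null, MessageId.makeUnanchored());
    t.getValueByField("id");    // 7    -- public member hit
    t.getValueByField("temp");  // 21.5 -- NoSuchFieldException, then the getter hit
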
diff --git a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/WrapperSetupHelper.java b/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/WrapperSetupHelper.java
deleted file mode 100644
index 03af852..0000000
--- a/flink-contrib/flink-storm/src/main/java/org/apache/flink/storm/wrappers/WrapperSetupHelper.java
+++ /dev/null
@@ -1,161 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wrappers;
-
-import org.apache.flink.streaming.api.operators.StreamingRuntimeContext;
-
-import clojure.lang.Atom;
-import org.apache.storm.Config;
-import org.apache.storm.generated.Bolt;
-import org.apache.storm.generated.ComponentCommon;
-import org.apache.storm.generated.SpoutSpec;
-import org.apache.storm.generated.StateSpoutSpec;
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IComponent;
-import org.apache.storm.topology.IRichBolt;
-import org.apache.storm.topology.IRichSpout;
-import org.apache.storm.tuple.Fields;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-/**
- * {@link WrapperSetupHelper} is a helper class used by {@link SpoutWrapper} and
- * {@link BoltWrapper}.
- */
-class WrapperSetupHelper {
-
- /** The configuration key for the topology name. */
- static final String TOPOLOGY_NAME = "storm.topology.name";
-
- /**
- * Computes the number of output attributes used by a {@link SpoutWrapper} or {@link BoltWrapper}
- * per declared output stream. The number is {@code -1} for raw output type or a value within range [0;25] for
- * output type {@link org.apache.flink.api.java.tuple.Tuple0 Tuple0} to
- * {@link org.apache.flink.api.java.tuple.Tuple25 Tuple25}.
- *
- * @param spoutOrBolt
- * The Storm {@link IRichSpout spout} or {@link IRichBolt bolt} to be used.
- * @param rawOutputs
- * Contains the names of all output streams that carry a single attribute and should be emitted as
- * a raw type rather than as {@link org.apache.flink.api.java.tuple.Tuple1 Tuple1}. (Can be {@code null}.)
- * @return The number of attributes to be used for each stream.
- * @throws IllegalArgumentException
- * If a stream is listed in {@code rawOutputs} but does not declare exactly one output attribute, or
- * if a stream not listed in {@code rawOutputs} declares a number of output attributes that is not
- * within range [0;25].
- */
- static HashMap<String, Integer> getNumberOfAttributes(final IComponent spoutOrBolt,
- final Collection<String> rawOutputs)
- throws IllegalArgumentException {
- final SetupOutputFieldsDeclarer declarer = new SetupOutputFieldsDeclarer();
- spoutOrBolt.declareOutputFields(declarer);
-
- for (Entry<String, Integer> schema : declarer.outputSchemas.entrySet()) {
- int declaredNumberOfAttributes = schema.getValue();
- if ((declaredNumberOfAttributes < 0) || (declaredNumberOfAttributes > 25)) {
- throw new IllegalArgumentException(
- "Provided bolt declares non supported number of output attributes. Must be in range [0;25] but "
- + "was " + declaredNumberOfAttributes);
- }
-
- if (rawOutputs != null && rawOutputs.contains(schema.getKey())) {
- if (declaredNumberOfAttributes != 1) {
- throw new IllegalArgumentException(
- "Ouput type is requested to be raw type, but provided bolt declares more then one output "
- + "attribute.");
- }
- schema.setValue(-1);
- }
- }
-
- return declarer.outputSchemas;
- }
-
- /**
- * Creates a {@link TopologyContext} for a Spout or Bolt instance (i.e., Flink task / Storm executor).
- *
- * @param context
- * The Flink runtime context.
- * @param spoutOrBolt
- * The Spout or Bolt this context is created for.
- * @param stormConfig
- * The user provided configuration.
- * @return The created {@link TopologyContext}.
- */
- @SuppressWarnings({ "rawtypes", "unchecked" })
- static synchronized TopologyContext createTopologyContext(
- final StreamingRuntimeContext context, final IComponent spoutOrBolt,
- final String operatorName, final Map stormConfig) {
-
- final int dop = context.getNumberOfParallelSubtasks();
-
- final Map<Integer, String> taskToComponents = new HashMap<Integer, String>();
- final Map<String, List<Integer>> componentToSortedTasks = new HashMap<String, List<Integer>>();
- final Map<String, Map<String, Fields>> componentToStreamToFields = new HashMap<String, Map<String, Fields>>();
- String stormId = (String) stormConfig.get(TOPOLOGY_NAME);
- String codeDir = null; // not supported
- String pidDir = null; // not supported
- Integer taskId = -1;
- Integer workerPort = null; // not supported
- List<Integer> workerTasks = new ArrayList<Integer>();
- final Map<String, Object> defaultResources = new HashMap<String, Object>();
- final Map<String, Object> userResources = new HashMap<String, Object>();
- final Map<String, Object> executorData = new HashMap<String, Object>();
- final Map registeredMetrics = new HashMap();
- Atom openOrPrepareWasCalled = null;
-
- ComponentCommon common = new ComponentCommon();
- common.set_parallelism_hint(dop);
-
- HashMap<String, SpoutSpec> spouts = new HashMap<String, SpoutSpec>();
- HashMap<String, Bolt> bolts = new HashMap<String, Bolt>();
- if (spoutOrBolt instanceof IRichSpout) {
- spouts.put(operatorName, new SpoutSpec(null, common));
- } else {
- assert (spoutOrBolt instanceof IRichBolt);
- bolts.put(operatorName, new Bolt(null, common));
- }
- StormTopology stormTopology = new StormTopology(spouts, bolts, new HashMap<String, StateSpoutSpec>());
-
- List<Integer> sortedTasks = new ArrayList<Integer>(dop);
- for (int i = 1; i <= dop; ++i) {
- taskToComponents.put(i, operatorName);
- sortedTasks.add(i);
- }
- componentToSortedTasks.put(operatorName, sortedTasks);
-
- SetupOutputFieldsDeclarer declarer = new SetupOutputFieldsDeclarer();
- spoutOrBolt.declareOutputFields(declarer);
- componentToStreamToFields.put(operatorName, declarer.outputStreams);
-
- if (!stormConfig.containsKey(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS)) {
- stormConfig.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS, 30); // Storm default value
- }
-
- return new FlinkTopologyContext(stormTopology, stormConfig, taskToComponents,
- componentToSortedTasks, componentToStreamToFields, stormId, codeDir, pidDir,
- taskId, workerPort, workerTasks, defaultResources, userResources, executorData,
- registeredMetrics, openOrPrepareWasCalled);
- }
-}
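
To make the removed arity computation concrete: getNumberOfAttributes() walks every declared
output stream, records its field count, and rewrites requested raw streams to -1. A sketch
using TestDummyBolt from the test utilities deleted further below; note the call would have
to live in the org.apache.flink.storm.wrappers package, since the helper is package-private.

    // Sketch only -- TestDummyBolt declares shuffleStream(data) and groupingStream(id, data).
    HashMap<String, Integer> arities = WrapperSetupHelper.getNumberOfAttributes(
            new TestDummyBolt(), Collections.singleton(TestDummyBolt.SHUFFLE_STREAM_ID));
    // arities: { "shuffleStream"  -> -1 (raw, single attribute),
    //            "groupingStream" ->  2 (emitted as Tuple2) }
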
diff --git a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/AbstractTest.java b/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/AbstractTest.java
deleted file mode 100644
index ca0e067..0000000
--- a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/AbstractTest.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.junit.Before;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Random;
-
-/**
- * Abstract class for all tests that require a {@link Random} to be setup before each test.
- */
-public abstract class AbstractTest {
- private static final Logger LOG = LoggerFactory.getLogger(AbstractTest.class);
-
- protected long seed;
- protected Random r;
-
- @Before
- public void prepare() {
- this.seed = System.currentTimeMillis();
- this.r = new Random(this.seed);
- LOG.info("Test seed: {}", new Long(this.seed));
- }
-
-}
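
The base class above encodes a small but useful reproducibility pattern: every randomized
test logs its seed, so a failing run can be replayed deterministically. A standalone sketch
of the same idea:

    // Sketch only -- re-run with the seed printed by a failing run to replay it exactly.
    long seed = System.currentTimeMillis();
    java.util.Random r = new java.util.Random(seed);
    System.out.println("Test seed: " + seed);
    int sample = r.nextInt(26);  // the same seed always yields the same sequence
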
diff --git a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/FiniteTestSpout.java b/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/FiniteTestSpout.java
deleted file mode 100644
index 8b89c95..0000000
--- a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/FiniteTestSpout.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichSpout;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-
-import java.util.Map;
-
-/**
- * A simple {@link IRichSpout} that emits a fixed number of tuples, used for testing.
- */
-public class FiniteTestSpout implements IRichSpout {
- private static final long serialVersionUID = 7992419478267824279L;
-
- private int numberOfOutputTuples;
- private SpoutOutputCollector collector;
-
- public FiniteTestSpout(final int numberOfOutputTuples) {
- this.numberOfOutputTuples = numberOfOutputTuples;
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public void open(final Map conf, final TopologyContext context, final SpoutOutputCollector collector) {
- this.collector = collector;
- }
-
- @Override
- public void close() {/* nothing to do */}
-
- @Override
- public void activate() {/* nothing to do */}
-
- @Override
- public void deactivate() {/* nothing to do */}
-
- @Override
- public void nextTuple() {
- if (--this.numberOfOutputTuples >= 0) {
- this.collector.emit(new Values(this.numberOfOutputTuples));
- }
- }
-
- @Override
- public void ack(final Object msgId) {/* nothing to do */}
-
- @Override
- public void fail(final Object msgId) {/* nothing to do */}
-
- @Override
- public void declareOutputFields(final OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("dummy"));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-
-}
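
The countdown above goes silent once its budget is exhausted, which is exactly the condition
SpoutWrapper's negative-invocation mode watches for. A hedged pairing sketch, reusing the
removed classes from this diff (env is a StreamExecutionEnvironment; the spout's single
"dummy" field surfaces as Tuple1<Integer> because no raw output is requested):

    // Sketch only -- "run until silent": a negative count stops the wrapper the first
    // time nextTuple() emits nothing, i.e. after exactly 50 tuples here.
    env.addSource(new SpoutWrapper<Tuple1<Integer>>(new FiniteTestSpout(50), -1))
       .returns(new TypeHint<Tuple1<Integer>>() {})
       .print();
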
diff --git a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/NullTerminatingSpoutTest.java b/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/NullTerminatingSpoutTest.java
deleted file mode 100644
index 7263ce4..0000000
--- a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/NullTerminatingSpoutTest.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichSpout;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.mockito.Matchers.same;
-import static org.mockito.Mockito.any;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-/**
- * Tests for the NullTerminatingSpout.
- */
-public class NullTerminatingSpoutTest {
-
- @Test
- public void testMethodCalls() {
- Map<String, Object> compConfig = new HashMap<String, Object>();
-
- IRichSpout spoutMock = mock(IRichSpout.class);
- when(spoutMock.getComponentConfiguration()).thenReturn(compConfig);
-
- Map<?, ?> conf = mock(Map.class);
- TopologyContext context = mock(TopologyContext.class);
- Object msgId = mock(Object.class);
- OutputFieldsDeclarer declarer = mock(OutputFieldsDeclarer.class);
-
- NullTerminatingSpout spout = new NullTerminatingSpout(spoutMock);
-
- spout.open(conf, context, null);
- spout.close();
- spout.activate();
- spout.deactivate();
- spout.ack(msgId);
- spout.fail(msgId);
- spout.declareOutputFields(declarer);
- Map<String, Object> c = spoutMock.getComponentConfiguration();
-
- verify(spoutMock).open(same(conf), same(context), any(SpoutOutputCollector.class));
- verify(spoutMock).close();
- verify(spoutMock).activate();
- verify(spoutMock).deactivate();
- verify(spoutMock).ack(same(msgId));
- verify(spoutMock).fail(same(msgId));
- verify(spoutMock).declareOutputFields(same(declarer));
- Assert.assertSame(compConfig, c);
- }
-
- @Test
- public void testReachedEnd() {
- NullTerminatingSpout finiteSpout = new NullTerminatingSpout(new TestDummySpout());
- finiteSpout.open(null, null, mock(SpoutOutputCollector.class));
-
- Assert.assertFalse(finiteSpout.reachedEnd());
-
- finiteSpout.nextTuple();
- Assert.assertFalse(finiteSpout.reachedEnd());
- finiteSpout.nextTuple();
- Assert.assertTrue(finiteSpout.reachedEnd());
- }
-
-}
diff --git a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/SpoutOutputCollectorObserverTest.java b/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/SpoutOutputCollectorObserverTest.java
deleted file mode 100644
index c150cc3..0000000
--- a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/SpoutOutputCollectorObserverTest.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.junit.Assert;
-import org.junit.Test;
-
-import static org.mockito.Mockito.mock;
-
-/**
- * Tests for the SpoutOutputCollectorObserver.
- */
-public class SpoutOutputCollectorObserverTest {
-
- @Test
- public void testFlag() {
- SpoutOutputCollectorObserver observer = new SpoutOutputCollectorObserver(mock(SpoutOutputCollector.class));
-
- observer.emitted = false;
- observer.emit(null);
- Assert.assertTrue(observer.emitted);
-
- observer.emitted = false;
- observer.emit(null, (Object) null);
- Assert.assertTrue(observer.emitted);
-
- observer.emitted = false;
- observer.emit((String) null, null);
- Assert.assertTrue(observer.emitted);
-
- observer.emitted = false;
- observer.emit(null, null, null);
- Assert.assertTrue(observer.emitted);
-
- observer.emitted = false;
- observer.emitDirect(0, null);
- Assert.assertTrue(observer.emitted);
-
- observer.emitted = false;
- observer.emitDirect(0, null, (Object) null);
- Assert.assertTrue(observer.emitted);
-
- observer.emitted = false;
- observer.emitDirect(0, (String) null, null);
- Assert.assertTrue(observer.emitted);
-
- observer.emitted = false;
- observer.emitDirect(0, null, null, null);
- Assert.assertTrue(observer.emitted);
- }
-
-}
diff --git a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/StormStreamSelectorTest.java b/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/StormStreamSelectorTest.java
deleted file mode 100644
index 67d4a17..0000000
--- a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/StormStreamSelectorTest.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.util.Iterator;
-
-/**
- * Tests for the StormStreamSelector.
- */
-public class StormStreamSelectorTest {
-
- @Test
- public void testSelector() {
- StormStreamSelector<Object> selector = new StormStreamSelector<Object>();
- SplitStreamType<Object> tuple = new SplitStreamType<Object>();
- Iterator<String> result;
-
- tuple.streamId = "stream1";
- result = selector.select(tuple).iterator();
- Assert.assertEquals("stream1", result.next());
- Assert.assertFalse(result.hasNext());
-
- tuple.streamId = "stream2";
- result = selector.select(tuple).iterator();
- Assert.assertEquals("stream2", result.next());
- Assert.assertFalse(result.hasNext());
-
- tuple.streamId = "stream1";
- result = selector.select(tuple).iterator();
- Assert.assertEquals("stream1", result.next());
- Assert.assertFalse(result.hasNext());
- }
-
-}
diff --git a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/TestDummyBolt.java b/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/TestDummyBolt.java
deleted file mode 100644
index 2773692..0000000
--- a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/TestDummyBolt.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.storm.task.OutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichBolt;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Tuple;
-import org.apache.storm.tuple.Values;
-
-import java.util.Map;
-
-/**
- * A test implementation of an {@link IRichBolt}.
- */
-public class TestDummyBolt implements IRichBolt {
- private static final long serialVersionUID = 6893611247443121322L;
-
- public static final String SHUFFLE_STREAM_ID = "shuffleStream";
- public static final String GROUPING_STREAM_ID = "groupingStream";
-
- private boolean emit = true;
- @SuppressWarnings("rawtypes")
- public Map config;
- private TopologyContext context;
- private OutputCollector collector;
-
- @SuppressWarnings("rawtypes")
- @Override
- public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
- this.config = stormConf;
- this.context = context;
- this.collector = collector;
- }
-
- @Override
- public void execute(Tuple input) {
- if (this.context.getThisTaskIndex() == 0) {
- this.collector.emit(SHUFFLE_STREAM_ID, input.getValues());
- }
- if (this.emit) {
- this.collector.emit(GROUPING_STREAM_ID, new Values("bolt", this.context));
- this.emit = false;
- }
- }
-
- @Override
- public void cleanup() {}
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declareStream(SHUFFLE_STREAM_ID, new Fields("data"));
- declarer.declareStream(GROUPING_STREAM_ID, new Fields("id", "data"));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-
-}
diff --git a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/TestDummySpout.java b/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/TestDummySpout.java
deleted file mode 100644
index 5ff8289..0000000
--- a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/TestDummySpout.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichSpout;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Values;
-import org.apache.storm.utils.Utils;
-
-import java.util.Map;
-
-/**
- * A test implementation of an {@link IRichSpout}.
- */
-public class TestDummySpout implements IRichSpout {
- private static final long serialVersionUID = -5190945609124603118L;
-
- public static final String SPOUT_STREAM_ID = "spout-stream";
-
- private boolean emit = true;
- @SuppressWarnings("rawtypes")
- public Map config;
- private TopologyContext context;
- private SpoutOutputCollector collector;
-
- @SuppressWarnings("rawtypes")
- @Override
- public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
- this.config = conf;
- this.context = context;
- this.collector = collector;
- }
-
- @Override
- public void close() {}
-
- @Override
- public void activate() {}
-
- @Override
- public void deactivate() {}
-
- @Override
- public void nextTuple() {
- if (this.emit) {
- this.collector.emit(new Values(this.context));
- this.emit = false;
- }
- }
-
- @Override
- public void ack(Object msgId) {}
-
- @Override
- public void fail(Object msgId) {}
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declareStream(Utils.DEFAULT_STREAM_ID, new Fields("data"));
- declarer.declareStream(SPOUT_STREAM_ID, new Fields("id", "data"));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-
-}
diff --git a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/TestSink.java b/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/TestSink.java
deleted file mode 100644
index a3bb884..0000000
--- a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/util/TestSink.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.util;
-
-import org.apache.storm.task.OutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichBolt;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Tuple;
-
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-
-/**
- * A test implementation of an {@link IRichBolt} that stores incoming records in {@link #RESULT}.
- */
-public class TestSink implements IRichBolt {
- private static final long serialVersionUID = 4314871456719370877L;
-
- public static final List<TopologyContext> RESULT = new LinkedList<TopologyContext>();
-
- @SuppressWarnings("rawtypes")
- @Override
- public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
- RESULT.add(context);
- }
-
- @Override
- public void execute(Tuple input) {
- if (input.size() == 1) {
- RESULT.add((TopologyContext) input.getValue(0));
- } else {
- RESULT.add((TopologyContext) input.getValue(1));
- }
- }
-
- @Override
- public void cleanup() {}
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {}
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-
-}
diff --git a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/BoltCollectorTest.java b/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/BoltCollectorTest.java
deleted file mode 100644
index d48042b..0000000
--- a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/BoltCollectorTest.java
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wrappers;
-
-import org.apache.flink.api.java.tuple.Tuple;
-import org.apache.flink.storm.util.AbstractTest;
-import org.apache.flink.streaming.api.operators.Output;
-
-import org.apache.storm.tuple.Values;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-
-/**
- * Tests for the BoltCollector.
- */
-public class BoltCollectorTest extends AbstractTest {
-
- @SuppressWarnings({ "rawtypes", "unchecked" })
- @Test
- public void testBoltStormCollector() throws InstantiationException, IllegalAccessException {
- for (int numberOfAttributes = -1; numberOfAttributes < 26; ++numberOfAttributes) {
- final Output flinkCollector = mock(Output.class);
- Tuple flinkTuple = null;
- final Values tuple = new Values();
-
- BoltCollector<?> collector;
-
- final String streamId = "streamId";
- HashMap<String, Integer> attributes = new HashMap<String, Integer>();
- attributes.put(streamId, numberOfAttributes);
-
- if (numberOfAttributes == -1) {
- collector = new BoltCollector(attributes, -1, flinkCollector);
- tuple.add(this.r.nextInt());
-
- } else {
- collector = new BoltCollector(attributes, -1, flinkCollector);
- flinkTuple = Tuple.getTupleClass(numberOfAttributes).newInstance();
-
- for (int i = 0; i < numberOfAttributes; ++i) {
- tuple.add(this.r.nextInt());
- flinkTuple.setField(tuple.get(i), i);
- }
- }
-
- final Collection anchors = mock(Collection.class);
- final List<Integer> taskIds;
- taskIds = collector.emit(streamId, anchors, tuple);
-
- Assert.assertNull(taskIds);
-
- if (numberOfAttributes == -1) {
- verify(flinkCollector).collect(tuple.get(0));
- } else {
- verify(flinkCollector).collect(flinkTuple);
- }
- }
- }
-
- @SuppressWarnings({ "rawtypes", "unchecked" })
- @Test
- public void testBoltStormCollectorWithTaskId() throws InstantiationException, IllegalAccessException {
- for (int numberOfAttributes = 0; numberOfAttributes < 25; ++numberOfAttributes) {
- final Output flinkCollector = mock(Output.class);
- final int taskId = 42;
- final String streamId = "streamId";
-
- HashMap<String, Integer> attributes = new HashMap<String, Integer>();
- attributes.put(streamId, numberOfAttributes);
-
- BoltCollector<?> collector = new BoltCollector(attributes, taskId, flinkCollector);
-
- final Values tuple = new Values();
- final Tuple flinkTuple = Tuple.getTupleClass(numberOfAttributes + 1).newInstance();
-
- for (int i = 0; i < numberOfAttributes; ++i) {
- tuple.add(this.r.nextInt());
- flinkTuple.setField(tuple.get(i), i);
- }
- flinkTuple.setField(taskId, numberOfAttributes);
-
- final Collection anchors = mock(Collection.class);
- final List<Integer> taskIds;
- taskIds = collector.emit(streamId, anchors, tuple);
-
- Assert.assertNull(taskIds);
-
- verify(flinkCollector).collect(flinkTuple);
- }
- }
-
- @SuppressWarnings("unchecked")
- @Test(expected = UnsupportedOperationException.class)
- public void testTooManyAttributes() {
- HashMap<String, Integer> attributes = new HashMap<String, Integer>();
- attributes.put("", 26);
-
- new BoltCollector<Object>(attributes, -1, mock(Output.class));
- }
-
- @SuppressWarnings("unchecked")
- @Test(expected = UnsupportedOperationException.class)
- public void testTooManyAttributesWithTaskId() {
- HashMap<String, Integer> attributes = new HashMap<String, Integer>();
- attributes.put("", 25);
-
- new BoltCollector<Object>(attributes, 42, mock(Output.class));
- }
-
- @SuppressWarnings("unchecked")
- @Test(expected = UnsupportedOperationException.class)
- public void testRawStreamWithTaskId() {
- HashMap<String, Integer> attributes = new HashMap<String, Integer>();
- attributes.put("", -1);
-
- new BoltCollector<Object>(attributes, 42, mock(Output.class));
- }
-
- @SuppressWarnings("unchecked")
- @Test(expected = UnsupportedOperationException.class)
- public void testEmitDirect() {
- new BoltCollector<Object>(mock(HashMap.class), -1, mock(Output.class)).emitDirect(0, null,
- null, null);
- }
-
-}
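
The tests above pin down the record mapping the removed BoltCollector performed, including
the extra trailing attribute when a producer task ID is attached. An illustrative packing,
under the assumption that it mirrors the transformation these tests verify:

    // Sketch only -- Storm Values to Flink Tuple, with the task ID appended last.
    Values storm = new Values("a", 1);
    Tuple3<String, Integer, Integer> flink = new Tuple3<String, Integer, Integer>();
    for (int i = 0; i < storm.size(); ++i) {
        flink.setField(storm.get(i), i);
    }
    flink.setField(42, storm.size()); // taskId -- hence 25 declared attributes already overflow
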
diff --git a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/BoltWrapperTest.java b/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/BoltWrapperTest.java
deleted file mode 100644
index d405a45..0000000
--- a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/BoltWrapperTest.java
+++ /dev/null
@@ -1,389 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wrappers;
-
-import org.apache.flink.api.common.ExecutionConfig;
-import org.apache.flink.api.common.TaskInfo;
-import org.apache.flink.api.java.tuple.Tuple;
-import org.apache.flink.api.java.tuple.Tuple1;
-import org.apache.flink.configuration.Configuration;
-import org.apache.flink.configuration.UnmodifiableConfiguration;
-import org.apache.flink.core.fs.CloseableRegistry;
-import org.apache.flink.metrics.groups.UnregisteredMetricsGroup;
-import org.apache.flink.runtime.execution.Environment;
-import org.apache.flink.runtime.metrics.groups.UnregisteredMetricGroups;
-import org.apache.flink.runtime.util.TestingTaskManagerRuntimeInfo;
-import org.apache.flink.storm.util.AbstractTest;
-import org.apache.flink.storm.util.SplitStreamType;
-import org.apache.flink.storm.util.StormConfig;
-import org.apache.flink.storm.util.TestDummyBolt;
-import org.apache.flink.streaming.api.operators.Output;
-import org.apache.flink.streaming.api.operators.StreamingRuntimeContext;
-import org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer;
-import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
-import org.apache.flink.streaming.runtime.tasks.StreamTask;
-import org.apache.flink.streaming.util.MockStreamConfig;
-
-import org.apache.storm.task.OutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichBolt;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.MessageId;
-import org.apache.storm.tuple.Values;
-import org.apache.storm.utils.Utils;
-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.powermock.api.mockito.PowerMockito;
-import org.powermock.core.classloader.annotations.PowerMockIgnore;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
-
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.eq;
-import static org.mockito.Matchers.isNotNull;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.same;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-/**
- * Tests for the BoltWrapper.
- */
-@RunWith(PowerMockRunner.class)
-@PrepareForTest({StreamElementSerializer.class, WrapperSetupHelper.class, StreamRecord.class})
-@PowerMockIgnore({"javax.management.*", "com.sun.jndi.*", "org.apache.log4j.*"})
-public class BoltWrapperTest extends AbstractTest {
-
- @Test(expected = IllegalArgumentException.class)
- public void testWrapperRawType() throws Exception {
- final SetupOutputFieldsDeclarer declarer = new SetupOutputFieldsDeclarer();
- declarer.declare(new Fields("dummy1", "dummy2"));
- PowerMockito.whenNew(SetupOutputFieldsDeclarer.class).withNoArguments().thenReturn(declarer);
-
- new BoltWrapper<Object, Object>(mock(IRichBolt.class), new String[] { Utils.DEFAULT_STREAM_ID });
- }
-
- @Test(expected = IllegalArgumentException.class)
- public void testWrapperTooManyAttributes1() throws Exception {
- final SetupOutputFieldsDeclarer declarer = new SetupOutputFieldsDeclarer();
- final String[] schema = new String[26];
- for (int i = 0; i < schema.length; ++i) {
- schema[i] = "a" + i;
- }
- declarer.declare(new Fields(schema));
- PowerMockito.whenNew(SetupOutputFieldsDeclarer.class).withNoArguments().thenReturn(declarer);
-
- new BoltWrapper<Object, Object>(mock(IRichBolt.class));
- }
-
- @Test(expected = IllegalArgumentException.class)
- public void testWrapperTooManyAttributes2() throws Exception {
- final SetupOutputFieldsDeclarer declarer = new SetupOutputFieldsDeclarer();
- final String[] schema = new String[26];
- for (int i = 0; i < schema.length; ++i) {
- schema[i] = "a" + i;
- }
- declarer.declare(new Fields(schema));
- PowerMockito.whenNew(SetupOutputFieldsDeclarer.class).withNoArguments().thenReturn(declarer);
-
- new BoltWrapper<Object, Object>(mock(IRichBolt.class), new String[] {});
- }
-
- @Test
- public void testWrapper() throws Exception {
- for (int i = -1; i < 26; ++i) {
- this.testWrapper(i);
- }
- }
-
- @SuppressWarnings({"rawtypes", "unchecked"})
- private void testWrapper(final int numberOfAttributes) throws Exception {
- assert ((-1 <= numberOfAttributes) && (numberOfAttributes <= 25));
- Tuple flinkTuple = null;
- String rawTuple = null;
-
- if (numberOfAttributes == -1) {
- rawTuple = "test";
- } else {
- flinkTuple = Tuple.getTupleClass(numberOfAttributes).newInstance();
- }
-
- final String[] schema;
- if (numberOfAttributes == -1) {
- schema = new String[1];
- } else {
- schema = new String[numberOfAttributes];
- }
- for (int i = 0; i < schema.length; ++i) {
- schema[i] = "a" + i;
- }
-
- final StreamRecord record = mock(StreamRecord.class);
- if (numberOfAttributes == -1) {
- when(record.getValue()).thenReturn(rawTuple);
- } else {
- when(record.getValue()).thenReturn(flinkTuple);
- }
-
- final StreamingRuntimeContext taskContext = mock(StreamingRuntimeContext.class);
- when(taskContext.getExecutionConfig()).thenReturn(mock(ExecutionConfig.class));
- when(taskContext.getTaskName()).thenReturn("name");
- when(taskContext.getMetricGroup()).thenReturn(new UnregisteredMetricsGroup());
-
- final IRichBolt bolt = mock(IRichBolt.class);
-
- final SetupOutputFieldsDeclarer declarer = new SetupOutputFieldsDeclarer();
- declarer.declare(new Fields(schema));
- PowerMockito.whenNew(SetupOutputFieldsDeclarer.class).withNoArguments().thenReturn(declarer);
-
- final BoltWrapper wrapper = new BoltWrapper(bolt, (Fields) null);
- wrapper.setup(createMockStreamTask(), new MockStreamConfig(), mock(Output.class));
- wrapper.open();
-
- wrapper.processElement(record);
- if (numberOfAttributes == -1) {
- verify(bolt).execute(
- eq(new StormTuple<String>(rawTuple, null, -1, null, null, MessageId
- .makeUnanchored())));
- } else {
- verify(bolt).execute(
- eq(new StormTuple<Tuple>(flinkTuple, null, -1, null, null, MessageId
- .makeUnanchored())));
- }
- }
-
- @SuppressWarnings({ "rawtypes", "unchecked" })
- @Test
- public void testMultipleOutputStreams() throws Exception {
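- // randomly flag each output stream as raw to cover both raw and Tuple-wrapped emission paths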
- final boolean rawOutType1 = super.r.nextBoolean();
- final boolean rawOutType2 = super.r.nextBoolean();
-
- final StreamRecord record = mock(StreamRecord.class);
- when(record.getValue()).thenReturn(2).thenReturn(3);
-
- final Output output = mock(Output.class);
-
- final TestBolt bolt = new TestBolt();
- final HashSet<String> raw = new HashSet<String>();
- if (rawOutType1) {
- raw.add("stream1");
- }
- if (rawOutType2) {
- raw.add("stream2");
- }
-
- final BoltWrapper wrapper = new BoltWrapper(bolt, null, raw);
- wrapper.setup(createMockStreamTask(), new MockStreamConfig(), output);
- wrapper.open();
-
- final SplitStreamType splitRecord = new SplitStreamType<Integer>();
- if (rawOutType1) {
- splitRecord.streamId = "stream1";
- splitRecord.value = 2;
- } else {
- splitRecord.streamId = "stream1";
- splitRecord.value = new Tuple1<Integer>(2);
- }
- wrapper.processElement(record);
- verify(output).collect(new StreamRecord<SplitStreamType>(splitRecord));
-
- if (rawOutType2) {
- splitRecord.streamId = "stream2";
- splitRecord.value = 3;
- } else {
- splitRecord.streamId = "stream2";
- splitRecord.value = new Tuple1<Integer>(3);
- }
- wrapper.processElement(record);
- verify(output, times(2)).collect(new StreamRecord<SplitStreamType>(splitRecord));
- }
-
- @SuppressWarnings("unchecked")
- @Test
- public void testOpen() throws Exception {
-
- // utility mocks
- final StormConfig stormConfig = new StormConfig();
- final Configuration flinkConfig = new Configuration();
-
- final ExecutionConfig taskConfig = mock(ExecutionConfig.class);
- when(taskConfig.getGlobalJobParameters()).thenReturn(null).thenReturn(stormConfig)
- .thenReturn(flinkConfig);
-
- final StreamingRuntimeContext taskContext = mock(StreamingRuntimeContext.class);
- when(taskContext.getExecutionConfig()).thenReturn(taskConfig);
- when(taskContext.getTaskName()).thenReturn("name");
- when(taskContext.getMetricGroup()).thenReturn(new UnregisteredMetricsGroup());
-
- final SetupOutputFieldsDeclarer declarer = new SetupOutputFieldsDeclarer();
- declarer.declare(new Fields("dummy"));
- PowerMockito.whenNew(SetupOutputFieldsDeclarer.class).withNoArguments().thenReturn(declarer);
-
- // (1) open with no configuration
- {
- ExecutionConfig execConfig = mock(ExecutionConfig.class);
- when(execConfig.getGlobalJobParameters()).thenReturn(null);
-
- final IRichBolt bolt = mock(IRichBolt.class);
- BoltWrapper<Object, Object> wrapper = new BoltWrapper<Object, Object>(bolt);
- wrapper.setup(createMockStreamTask(execConfig), new MockStreamConfig(), mock(Output.class));
-
- wrapper.open();
- verify(bolt).prepare(any(Map.class), any(TopologyContext.class), any(OutputCollector.class));
- }
-
- // (2) open with a storm specific configuration
- {
- ExecutionConfig execConfig = mock(ExecutionConfig.class);
- when(execConfig.getGlobalJobParameters()).thenReturn(stormConfig);
-
- final IRichBolt bolt = mock(IRichBolt.class);
- BoltWrapper<Object, Object> wrapper = new BoltWrapper<Object, Object>(bolt);
- wrapper.setup(createMockStreamTask(execConfig), new MockStreamConfig(), mock(Output.class));
-
- wrapper.open();
- verify(bolt).prepare(same(stormConfig), any(TopologyContext.class), any(OutputCollector.class));
- }
-
- // (3) open with a flink config
- {
- final Configuration cfg = new Configuration();
- cfg.setString("foo", "bar");
- cfg.setInteger("the end (the int)", Integer.MAX_VALUE);
-
- ExecutionConfig execConfig = mock(ExecutionConfig.class);
- when(execConfig.getGlobalJobParameters()).thenReturn(new UnmodifiableConfiguration(cfg));
-
- TestDummyBolt testBolt = new TestDummyBolt();
- BoltWrapper<Object, Object> wrapper = new BoltWrapper<Object, Object>(testBolt);
- wrapper.setup(createMockStreamTask(execConfig), new MockStreamConfig(), mock(Output.class));
-
- wrapper.open();
- for (Entry<String, String> entry : cfg.toMap().entrySet()) {
- Assert.assertEquals(entry.getValue(), testBolt.config.get(entry.getKey()));
- }
- }
- }
-
- @SuppressWarnings("unchecked")
- @Test
- public void testOpenSink() throws Exception {
- final StormConfig stormConfig = new StormConfig();
- final Configuration flinkConfig = new Configuration();
-
- final ExecutionConfig taskConfig = mock(ExecutionConfig.class);
- when(taskConfig.getGlobalJobParameters()).thenReturn(null).thenReturn(stormConfig)
- .thenReturn(flinkConfig);
-
- final StreamingRuntimeContext taskContext = mock(StreamingRuntimeContext.class);
- when(taskContext.getExecutionConfig()).thenReturn(taskConfig);
- when(taskContext.getTaskName()).thenReturn("name");
- when(taskContext.getMetricGroup()).thenReturn(new UnregisteredMetricsGroup());
-
- final IRichBolt bolt = mock(IRichBolt.class);
- BoltWrapper<Object, Object> wrapper = new BoltWrapper<Object, Object>(bolt);
-
- wrapper.setup(createMockStreamTask(), new MockStreamConfig(), mock(Output.class));
- wrapper.open();
-
- verify(bolt).prepare(any(Map.class), any(TopologyContext.class), isNotNull(OutputCollector.class));
- }
-
- @SuppressWarnings("unchecked")
- @Test
- public void testClose() throws Exception {
- final IRichBolt bolt = mock(IRichBolt.class);
-
- final SetupOutputFieldsDeclarer declarer = new SetupOutputFieldsDeclarer();
- declarer.declare(new Fields("dummy"));
- PowerMockito.whenNew(SetupOutputFieldsDeclarer.class).withNoArguments().thenReturn(declarer);
-
- final BoltWrapper<Object, Object> wrapper = new BoltWrapper<Object, Object>(bolt);
-
- wrapper.setup(createMockStreamTask(), new MockStreamConfig(), mock(Output.class));
-
- wrapper.close();
- wrapper.dispose();
-
- verify(bolt).cleanup();
- }
-
- private static final class TestBolt implements IRichBolt {
- private static final long serialVersionUID = 7278692872260138758L;
- private transient OutputCollector collector;
- private int counter = 0;
-
- @SuppressWarnings("rawtypes")
- @Override
- public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
- this.collector = collector;
- }
-
- @Override
- public void execute(org.apache.storm.tuple.Tuple input) {
- if (++counter % 2 == 1) {
- this.collector.emit("stream1", new Values(input.getInteger(0)));
- } else {
- this.collector.emit("stream2", new Values(input.getInteger(0)));
- }
- }
-
- @Override
- public void cleanup() {}
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declareStream("stream1", new Fields("a1"));
- declarer.declareStream("stream2", new Fields("a2"));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
- }
-
- public static StreamTask<?, ?> createMockStreamTask() {
- return createMockStreamTask(new ExecutionConfig());
- }
-
- public static StreamTask<?, ?> createMockStreamTask(ExecutionConfig execConfig) {
- Environment env = mock(Environment.class);
- when(env.getTaskInfo()).thenReturn(new TaskInfo("Mock Task", 1, 0, 1, 0));
- when(env.getUserClassLoader()).thenReturn(BoltWrapperTest.class.getClassLoader());
- when(env.getMetricGroup()).thenReturn(UnregisteredMetricGroups.createUnregisteredTaskMetricGroup());
- when(env.getTaskManagerInfo()).thenReturn(new TestingTaskManagerRuntimeInfo());
-
- final CloseableRegistry closeableRegistry = new CloseableRegistry();
- StreamTask<?, ?> mockTask = mock(StreamTask.class);
- when(mockTask.getCheckpointLock()).thenReturn(new Object());
- when(mockTask.getConfiguration()).thenReturn(new MockStreamConfig());
- when(mockTask.getEnvironment()).thenReturn(env);
- when(mockTask.getExecutionConfig()).thenReturn(execConfig);
- when(mockTask.getCancelables()).thenReturn(closeableRegistry);
-
- return mockTask;
- }
-}
diff --git a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/FlinkTopologyContextTest.java b/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/FlinkTopologyContextTest.java
deleted file mode 100644
index 0ee796b..0000000
--- a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/FlinkTopologyContextTest.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wrappers;
-
-import org.apache.flink.storm.util.AbstractTest;
-
-import org.apache.storm.generated.Bolt;
-import org.apache.storm.generated.SpoutSpec;
-import org.apache.storm.generated.StateSpoutSpec;
-import org.apache.storm.generated.StormTopology;
-import org.apache.storm.metric.api.ICombiner;
-import org.apache.storm.metric.api.IMetric;
-import org.apache.storm.metric.api.IReducer;
-import org.junit.Test;
-
-import java.util.HashMap;
-
-/**
- * FlinkTopologyContext.getSources(componentId) and FlinkTopologyContext.getTargets(componentId) are not tested here
- * because they are covered by WrapperSetupHelperTest.
- */
-public class FlinkTopologyContextTest extends AbstractTest {
-
- @Test(expected = UnsupportedOperationException.class)
- public void testAddTaskHook() {
- new FlinkTopologyContext(new StormTopology(new HashMap<String, SpoutSpec>(),
- new HashMap<String, Bolt>(), new HashMap<String, StateSpoutSpec>()), null, null,
- null, null, null, null, null, null, null, null, null, null, null, null, null)
- .addTaskHook(null);
- }
-
- @Test(expected = UnsupportedOperationException.class)
- public void testGetHooks() {
- new FlinkTopologyContext(new StormTopology(new HashMap<String, SpoutSpec>(),
- new HashMap<String, Bolt>(), new HashMap<String, StateSpoutSpec>()), null, null,
- null, null, null, null, null, null, null, null, null, null, null, null, null)
- .getHooks();
- }
-
- @SuppressWarnings("rawtypes")
- @Test(expected = UnsupportedOperationException.class)
- public void testRegisteredMetric1() {
- new FlinkTopologyContext(new StormTopology(new HashMap<String, SpoutSpec>(),
- new HashMap<String, Bolt>(), new HashMap<String, StateSpoutSpec>()), null, null,
- null, null, null, null, null, null, null, null, null, null, null, null, null)
- .registerMetric(null, (ICombiner) null, 0);
- }
-
- @SuppressWarnings("rawtypes")
- @Test(expected = UnsupportedOperationException.class)
- public void testRegisteredMetric2() {
- new FlinkTopologyContext(new StormTopology(new HashMap<String, SpoutSpec>(),
- new HashMap<String, Bolt>(), new HashMap<String, StateSpoutSpec>()), null, null,
- null, null, null, null, null, null, null, null, null, null, null, null, null)
- .registerMetric(null, (IReducer) null, 0);
- }
-
- @Test(expected = UnsupportedOperationException.class)
- public void testRegisteredMetric3() {
- new FlinkTopologyContext(new StormTopology(new HashMap<String, SpoutSpec>(),
- new HashMap<String, Bolt>(), new HashMap<String, StateSpoutSpec>()), null, null,
- null, null, null, null, null, null, null, null, null, null, null, null, null)
- .registerMetric(null, (IMetric) null, 0);
- }
-
- @Test(expected = UnsupportedOperationException.class)
- public void testGetRegisteredMetricByName() {
- new FlinkTopologyContext(new StormTopology(new HashMap<String, SpoutSpec>(),
- new HashMap<String, Bolt>(), new HashMap<String, StateSpoutSpec>()), null, null,
- null, null, null, null, null, null, null, null, null, null, null, null, null)
- .getRegisteredMetricByName(null);
- }
-
- @Test(expected = UnsupportedOperationException.class)
- public void testSetAllSubscribedState() {
- new FlinkTopologyContext(new StormTopology(new HashMap<String, SpoutSpec>(),
- new HashMap<String, Bolt>(), new HashMap<String, StateSpoutSpec>()), null, null,
- null, null, null, null, null, null, null, null, null, null, null, null, null)
- .setAllSubscribedState(null);
- }
-
- @Test(expected = UnsupportedOperationException.class)
- public void testSetSubscribedState1() {
- new FlinkTopologyContext(new StormTopology(new HashMap<String, SpoutSpec>(),
- new HashMap<String, Bolt>(), new HashMap<String, StateSpoutSpec>()), null, null,
- null, null, null, null, null, null, null, null, null, null, null, null, null)
- .setSubscribedState(null, null);
- }
-
- @Test(expected = UnsupportedOperationException.class)
- public void testSetSubscribedState2() {
- new FlinkTopologyContext(new StormTopology(new HashMap<String, SpoutSpec>(),
- new HashMap<String, Bolt>(), new HashMap<String, StateSpoutSpec>()), null, null,
- null, null, null, null, null, null, null, null, null, null, null, null, null)
- .setSubscribedState(null, null, null);
- }
-
-}
diff --git a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/SetupOutputFieldsDeclarerTest.java b/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/SetupOutputFieldsDeclarerTest.java
deleted file mode 100644
index d6575d8..0000000
--- a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/SetupOutputFieldsDeclarerTest.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wrappers;
-
-import org.apache.flink.storm.util.AbstractTest;
-
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.utils.Utils;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.util.ArrayList;
-
-/**
- * Tests for the SetupOutputFieldsDeclarer.
- */
-public class SetupOutputFieldsDeclarerTest extends AbstractTest {
-
- @Test
- public void testDeclare() {
- final SetupOutputFieldsDeclarer declarer = new SetupOutputFieldsDeclarer();
-
- int numberOfAttributes = this.r.nextInt(26);
- declarer.declare(createSchema(numberOfAttributes));
- Assert.assertEquals(1, declarer.outputSchemas.size());
- Assert.assertEquals(numberOfAttributes, declarer.outputSchemas.get(Utils.DEFAULT_STREAM_ID)
- .intValue());
-
- final String sid = "streamId";
- numberOfAttributes = this.r.nextInt(26);
- declarer.declareStream(sid, createSchema(numberOfAttributes));
- Assert.assertEquals(2, declarer.outputSchemas.size());
- Assert.assertEquals(numberOfAttributes, declarer.outputSchemas.get(sid).intValue());
- }
-
- private Fields createSchema(final int numberOfAttributes) {
- final ArrayList<String> schema = new ArrayList<String>(numberOfAttributes);
- for (int i = 0; i < numberOfAttributes; ++i) {
- schema.add("a" + i);
- }
- return new Fields(schema);
- }
-
- @Test
- public void testDeclareDirect() {
- new SetupOutputFieldsDeclarer().declare(false, new Fields());
- }
-
- @Test(expected = UnsupportedOperationException.class)
- public void testDeclareDirectFail() {
- new SetupOutputFieldsDeclarer().declare(true, new Fields());
- }
-
- @Test
- public void testDeclareStream() {
- new SetupOutputFieldsDeclarer().declareStream(Utils.DEFAULT_STREAM_ID, new Fields());
- }
-
- @Test(expected = IllegalArgumentException.class)
- public void testDeclareStreamFail() {
- new SetupOutputFieldsDeclarer().declareStream(null, new Fields());
- }
-
- @Test
- public void testDeclareFullStream() {
- new SetupOutputFieldsDeclarer().declareStream(Utils.DEFAULT_STREAM_ID, false, new Fields());
- }
-
- @Test(expected = IllegalArgumentException.class)
- public void testDeclareFullStreamFailNonDefaultStream() {
- new SetupOutputFieldsDeclarer().declareStream(null, false, new Fields());
- }
-
- @Test(expected = UnsupportedOperationException.class)
- public void testDeclareFullStreamFailDirect() {
- new SetupOutputFieldsDeclarer().declareStream(Utils.DEFAULT_STREAM_ID, true, new Fields());
- }
-
-}
diff --git a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/SpoutCollectorTest.java b/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/SpoutCollectorTest.java
deleted file mode 100644
index b91871a..0000000
--- a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/SpoutCollectorTest.java
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wrappers;
-
-import org.apache.flink.api.java.tuple.Tuple;
-import org.apache.flink.storm.util.AbstractTest;
-import org.apache.flink.streaming.api.functions.source.SourceFunction.SourceContext;
-
-import org.apache.storm.tuple.Values;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.util.HashMap;
-import java.util.List;
-
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-
-/**
- * Tests for the SpoutCollector.
- */
-public class SpoutCollectorTest extends AbstractTest {
-
- @SuppressWarnings({ "rawtypes", "unchecked" })
- @Test
- public void testSpoutStormCollector() throws InstantiationException, IllegalAccessException {
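- // -1 denotes a raw output stream; 0 through 25 cover every supported Tuple arity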
- for (int numberOfAttributes = -1; numberOfAttributes < 26; ++numberOfAttributes) {
- final SourceContext flinkCollector = mock(SourceContext.class);
- Tuple flinkTuple = null;
- final Values tuple = new Values();
-
- SpoutCollector<?> collector;
-
- final String streamId = "streamId";
- HashMap<String, Integer> attributes = new HashMap<String, Integer>();
- attributes.put(streamId, numberOfAttributes);
-
- if (numberOfAttributes == -1) {
- collector = new SpoutCollector(attributes, -1, flinkCollector);
- tuple.add(new Integer(this.r.nextInt()));
- } else {
- collector = new SpoutCollector(attributes, -1, flinkCollector);
- flinkTuple = Tuple.getTupleClass(numberOfAttributes).newInstance();
-
- for (int i = 0; i < numberOfAttributes; ++i) {
- tuple.add(new Integer(this.r.nextInt()));
- flinkTuple.setField(tuple.get(i), i);
- }
- }
-
- final List<Integer> taskIds;
- final Object messageId = new Integer(this.r.nextInt());
-
- taskIds = collector.emit(streamId, tuple, messageId);
-
- Assert.assertNull(taskIds);
-
- if (numberOfAttributes == -1) {
- verify(flinkCollector).collect(tuple.get(0));
- } else {
- verify(flinkCollector).collect(flinkTuple);
- }
- }
- }
-
- @SuppressWarnings({ "rawtypes", "unchecked" })
- @Test
- public void testSpoutStormCollectorWithTaskId() throws InstantiationException, IllegalAccessException {
- for (int numberOfAttributes = 0; numberOfAttributes < 25; ++numberOfAttributes) {
- final SourceContext flinkCollector = mock(SourceContext.class);
- final int taskId = 42;
- final String streamId = "streamId";
-
- HashMap<String, Integer> attributes = new HashMap<String, Integer>();
- attributes.put(streamId, numberOfAttributes);
-
- SpoutCollector<?> collector = new SpoutCollector(attributes, taskId, flinkCollector);
-
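- // the emitted Flink tuple carries one extra trailing field holding the producer's task id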
- final Values tuple = new Values();
- final Tuple flinkTuple = Tuple.getTupleClass(numberOfAttributes + 1).newInstance();
-
- for (int i = 0; i < numberOfAttributes; ++i) {
- tuple.add(new Integer(this.r.nextInt()));
- flinkTuple.setField(tuple.get(i), i);
- }
- flinkTuple.setField(taskId, numberOfAttributes);
-
- final List<Integer> taskIds;
- final Object messageId = new Integer(this.r.nextInt());
-
- taskIds = collector.emit(streamId, tuple, messageId);
-
- Assert.assertNull(taskIds);
-
- verify(flinkCollector).collect(flinkTuple);
- }
- }
-
- @SuppressWarnings("unchecked")
- @Test(expected = UnsupportedOperationException.class)
- public void testTooManyAttributes() {
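- // Flink tuples support at most 25 attributes, so 26 must be rejected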
- HashMap<String, Integer> attributes = new HashMap<String, Integer>();
- attributes.put("", 26);
-
- new SpoutCollector<Object>(attributes, -1, mock(SourceContext.class));
- }
-
- @SuppressWarnings("unchecked")
- @Test(expected = UnsupportedOperationException.class)
- public void testTooManyAttributesWithTaskId() {
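- // 25 attributes plus the appended task id field exceed the 25-attribute limit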
- HashMap<String, Integer> attributes = new HashMap<String, Integer>();
- attributes.put("", 25);
-
- new SpoutCollector<Object>(attributes, 42, mock(SourceContext.class));
- }
-
- @SuppressWarnings("unchecked")
- @Test(expected = UnsupportedOperationException.class)
- public void testRawStreamWithTaskId() {
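- // raw streams cannot carry the extra task id field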
- HashMap<String, Integer> attributes = new HashMap<String, Integer>();
- attributes.put("", -1);
-
- new SpoutCollector<Object>(attributes, 42, mock(SourceContext.class));
- }
-
- @SuppressWarnings("unchecked")
- @Test(expected = UnsupportedOperationException.class)
- public void testEmitDirect() {
- new SpoutCollector<Object>(mock(HashMap.class), -1, mock(SourceContext.class)).emitDirect(
- 0, null, null, (Object) null);
- }
-
-}
diff --git a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/SpoutWrapperTest.java b/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/SpoutWrapperTest.java
deleted file mode 100644
index e6d861b..0000000
--- a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/SpoutWrapperTest.java
+++ /dev/null
@@ -1,216 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wrappers;
-
-import org.apache.flink.api.common.ExecutionConfig;
-import org.apache.flink.api.java.tuple.Tuple1;
-import org.apache.flink.configuration.Configuration;
-import org.apache.flink.storm.util.AbstractTest;
-import org.apache.flink.storm.util.FiniteSpout;
-import org.apache.flink.storm.util.FiniteTestSpout;
-import org.apache.flink.storm.util.StormConfig;
-import org.apache.flink.storm.util.TestDummySpout;
-import org.apache.flink.streaming.api.functions.source.SourceFunction.SourceContext;
-import org.apache.flink.streaming.api.operators.StreamingRuntimeContext;
-
-import org.apache.storm.spout.SpoutOutputCollector;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.IRichSpout;
-import org.apache.storm.tuple.Fields;
-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.powermock.api.mockito.PowerMockito;
-import org.powermock.core.classloader.annotations.PowerMockIgnore;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
-
-import java.util.LinkedList;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.eq;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-/**
- * Tests for the SpoutWrapper.
- */
-@RunWith(PowerMockRunner.class)
-@PrepareForTest(WrapperSetupHelper.class)
-@PowerMockIgnore({"javax.management.*", "com.sun.jndi.*", "org.apache.log4j.*"})
-public class SpoutWrapperTest extends AbstractTest {
-
- @SuppressWarnings({ "rawtypes", "unchecked" })
- @Test
- public void testRunPrepare() throws Exception {
- final StormConfig stormConfig = new StormConfig();
- stormConfig.put(this.r.nextInt(), this.r.nextInt());
- final Configuration flinkConfig = new Configuration();
- flinkConfig.setInteger("testKey", this.r.nextInt());
-
- final ExecutionConfig taskConfig = mock(ExecutionConfig.class);
- when(taskConfig.getGlobalJobParameters()).thenReturn(null).thenReturn(stormConfig)
- .thenReturn(flinkConfig);
-
- final StreamingRuntimeContext taskContext = mock(StreamingRuntimeContext.class);
- when(taskContext.getExecutionConfig()).thenReturn(taskConfig);
- when(taskContext.getTaskName()).thenReturn("name");
-
- final IRichSpout spout = mock(IRichSpout.class);
- SpoutWrapper spoutWrapper = new SpoutWrapper(spout);
- spoutWrapper.setRuntimeContext(taskContext);
- spoutWrapper.cancel();
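- // cancelled up front: each run() call below opens the spout but skips the emit loop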
-
- // test without configuration
- spoutWrapper.run(mock(SourceContext.class));
- verify(spout).open(any(Map.class), any(TopologyContext.class),
- any(SpoutOutputCollector.class));
-
- // test with StormConfig
- spoutWrapper.run(mock(SourceContext.class));
- verify(spout).open(eq(stormConfig), any(TopologyContext.class),
- any(SpoutOutputCollector.class));
-
- // test with Configuration
- final TestDummySpout testSpout = new TestDummySpout();
- spoutWrapper = new SpoutWrapper(testSpout);
- spoutWrapper.setRuntimeContext(taskContext);
- spoutWrapper.cancel();
-
- spoutWrapper.run(mock(SourceContext.class));
- for (Entry<String, String> entry : flinkConfig.toMap().entrySet()) {
- Assert.assertEquals(entry.getValue(), testSpout.config.get(entry.getKey()));
- }
- }
-
- @SuppressWarnings("unchecked")
- @Test
- public void testRunExecuteFixedNumber() throws Exception {
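- // a fixed numberOfCalls bounds the wrapper to exactly that many nextTuple() invocations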
- final SetupOutputFieldsDeclarer declarer = new SetupOutputFieldsDeclarer();
- declarer.declare(new Fields("dummy"));
- PowerMockito.whenNew(SetupOutputFieldsDeclarer.class).withNoArguments()
- .thenReturn(declarer);
-
- final StreamingRuntimeContext taskContext = mock(StreamingRuntimeContext.class);
- when(taskContext.getExecutionConfig()).thenReturn(mock(ExecutionConfig.class));
- when(taskContext.getTaskName()).thenReturn("name");
-
- final IRichSpout spout = mock(IRichSpout.class);
- final int numberOfCalls = this.r.nextInt(50);
- final SpoutWrapper<?> spoutWrapper = new SpoutWrapper<Object>(spout,
- numberOfCalls);
- spoutWrapper.setRuntimeContext(taskContext);
-
- spoutWrapper.run(mock(SourceContext.class));
- verify(spout, times(numberOfCalls)).nextTuple();
- }
-
- @Test
- public void testRunExecuteFinite() throws Exception {
- final int numberOfCalls = this.r.nextInt(50);
-
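- // the expected output counts down from numberOfCalls - 1 to 0, matching FiniteTestSpout's emission order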
- final LinkedList<Tuple1<Integer>> expectedResult = new LinkedList<Tuple1<Integer>>();
- for (int i = numberOfCalls - 1; i >= 0; --i) {
- expectedResult.add(new Tuple1<Integer>(new Integer(i)));
- }
-
- final StreamingRuntimeContext taskContext = mock(StreamingRuntimeContext.class);
- when(taskContext.getExecutionConfig()).thenReturn(mock(ExecutionConfig.class));
- when(taskContext.getTaskName()).thenReturn("name");
-
- final FiniteTestSpout spout = new FiniteTestSpout(numberOfCalls);
- final SpoutWrapper<Tuple1<Integer>> spoutWrapper = new SpoutWrapper<Tuple1<Integer>>(
- spout, -1);
- spoutWrapper.setRuntimeContext(taskContext);
-
- final TestContext collector = new TestContext();
- spoutWrapper.run(collector);
-
- Assert.assertEquals(expectedResult, collector.result);
- }
-
- @SuppressWarnings("unchecked")
- @Test
- public void runAndExecuteFiniteSpout() throws Exception {
- final FiniteSpout stormSpout = mock(FiniteSpout.class);
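- // reachedEnd() is polled before every emit; three false answers before the first true permit exactly three nextTuple() calls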
- when(stormSpout.reachedEnd()).thenReturn(false, false, false, true, false, false, true);
-
- final StreamingRuntimeContext taskContext = mock(StreamingRuntimeContext.class);
- when(taskContext.getExecutionConfig()).thenReturn(mock(ExecutionConfig.class));
- when(taskContext.getTaskName()).thenReturn("name");
-
- final SpoutWrapper<?> wrapper = new SpoutWrapper<Object>(stormSpout);
- wrapper.setRuntimeContext(taskContext);
-
- wrapper.run(mock(SourceContext.class));
- verify(stormSpout, times(3)).nextTuple();
- }
-
- @SuppressWarnings("unchecked")
- @Test
- public void runAndExecuteFiniteSpout2() throws Exception {
- final FiniteSpout stormSpout = mock(FiniteSpout.class);
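- // the very first reachedEnd() poll returns true, so nextTuple() is never called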
- when(stormSpout.reachedEnd()).thenReturn(true, false, true, false, true, false, true);
-
- final StreamingRuntimeContext taskContext = mock(StreamingRuntimeContext.class);
- when(taskContext.getExecutionConfig()).thenReturn(mock(ExecutionConfig.class));
- when(taskContext.getTaskName()).thenReturn("name");
-
- final SpoutWrapper<?> wrapper = new SpoutWrapper<Object>(stormSpout);
- wrapper.setRuntimeContext(taskContext);
-
- wrapper.run(mock(SourceContext.class));
- verify(stormSpout, never()).nextTuple();
- }
-
- @Test
- public void testCancel() throws Exception {
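- // cancelling before run() makes the source loop exit immediately, so nothing is collected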
- final int numberOfCalls = 5 + this.r.nextInt(5);
-
- final StreamingRuntimeContext taskContext = mock(StreamingRuntimeContext.class);
- when(taskContext.getExecutionConfig()).thenReturn(mock(ExecutionConfig.class));
- when(taskContext.getTaskName()).thenReturn("name");
-
- final IRichSpout spout = new FiniteTestSpout(numberOfCalls);
-
- final SpoutWrapper<Tuple1<Integer>> spoutWrapper = new SpoutWrapper<Tuple1<Integer>>(spout);
- spoutWrapper.setRuntimeContext(taskContext);
-
- spoutWrapper.cancel();
- final TestContext collector = new TestContext();
- spoutWrapper.run(collector);
-
- Assert.assertEquals(new LinkedList<Tuple1<Integer>>(), collector.result);
- }
-
- @Test
- public void testClose() throws Exception {
- final IRichSpout spout = mock(IRichSpout.class);
- final SpoutWrapper<Tuple1<Integer>> spoutWrapper = new SpoutWrapper<Tuple1<Integer>>(spout);
-
- spoutWrapper.close();
-
- verify(spout).close();
- }
-
-}
diff --git a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/StormTupleTest.java b/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/StormTupleTest.java
deleted file mode 100644
index 2ff6c45..0000000
--- a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/StormTupleTest.java
+++ /dev/null
@@ -1,710 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wrappers;
-
-import org.apache.flink.api.java.tuple.Tuple;
-import org.apache.flink.api.java.tuple.Tuple5;
-import org.apache.flink.configuration.ConfigConstants;
-import org.apache.flink.storm.util.AbstractTest;
-
-import org.apache.storm.generated.GlobalStreamId;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.MessageId;
-import org.apache.storm.tuple.Values;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.mockito.Mockito.mock;
-
-/**
- * Tests for the StormTuple.
- */
-public class StormTupleTest extends AbstractTest {
- private static final String fieldName = "fieldName";
- private static final String fieldNamePojo = "member";
-
- private int arity, index;
-
- @Override
- @Before
- public void prepare() {
- super.prepare();
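- // pick a random tuple arity in [1, 25] and a random attribute index within it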
- this.arity = 1 + r.nextInt(25);
- this.index = r.nextInt(this.arity);
- }
-
- @Test
- public void nonTupleTest() {
- final Object flinkTuple = this.r.nextInt();
-
- final StormTuple<Object> tuple = new StormTuple<Object>(flinkTuple, null, -1, null, null,
- null);
- Assert.assertSame(flinkTuple, tuple.getValue(0));
-
- final List<Object> values = tuple.getValues();
- Assert.assertEquals(1, values.size());
- Assert.assertEquals(flinkTuple, values.get(0));
- }
-
- @Test
- public void tupleTest() throws InstantiationException, IllegalAccessException {
- for (int numberOfAttributes = 0; numberOfAttributes < 26; ++numberOfAttributes) {
- final Object[] data = new Object[numberOfAttributes];
-
- final Tuple flinkTuple = Tuple.getTupleClass(numberOfAttributes).newInstance();
- for (int i = 0; i < numberOfAttributes; ++i) {
- data[i] = this.r.nextInt();
- flinkTuple.setField(data[i], i);
- }
-
- final StormTuple<Tuple> tuple = new StormTuple<Tuple>(flinkTuple, null, -1, null, null,
- null);
- final List<Object> values = tuple.getValues();
-
- Assert.assertEquals(numberOfAttributes, values.size());
- for (int i = 0; i < numberOfAttributes; ++i) {
- Assert.assertEquals(flinkTuple.getField(i), values.get(i));
- }
-
- Assert.assertEquals(numberOfAttributes, tuple.size());
- }
- }
-
- @Test
- public void tupleTestWithTaskId() throws InstantiationException, IllegalAccessException {
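- // a non-negative source task id hides the last tuple field from getValues(), hence arity - 1 visible attributes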
- for (int numberOfAttributes = 1; numberOfAttributes < 26; ++numberOfAttributes) {
- final Object[] data = new Object[numberOfAttributes];
-
- final Tuple flinkTuple = Tuple.getTupleClass(numberOfAttributes).newInstance();
- for (int i = 0; i < numberOfAttributes - 1; ++i) {
- data[i] = this.r.nextInt();
- flinkTuple.setField(data[i], i);
- }
-
- final StormTuple<Tuple> tuple = new StormTuple<Tuple>(flinkTuple, null, 0, null, null,
- null);
- final List<Object> values = tuple.getValues();
-
- Assert.assertEquals(numberOfAttributes - 1, values.size());
- for (int i = 0; i < numberOfAttributes - 1; ++i) {
- Assert.assertEquals(flinkTuple.getField(i), values.get(i));
- }
-
- Assert.assertEquals(numberOfAttributes - 1, tuple.size());
- }
- }
-
- @Test
- public void testBinary() {
- final byte[] data = new byte[this.r.nextInt(15)];
- this.r.nextBytes(data);
-
- final int index = this.r.nextInt(5);
- final Tuple flinkTuple = new Tuple5<Object, Object, Object, Object, Object>();
- flinkTuple.setField(data, index);
-
- final StormTuple<Tuple> tuple = new StormTuple<Tuple>(flinkTuple, null, -1, null, null,
- null);
- Assert.assertEquals(flinkTuple.getField(index), tuple.getBinary(index));
- }
-
- @Test
- public void testBoolean() {
- final Boolean flinkTuple = this.r.nextBoolean();
-
- final StormTuple<Boolean> tuple = new StormTuple<Boolean>(flinkTuple, null, -1, null, null,
- null);
- Assert.assertEquals(flinkTuple, tuple.getBoolean(0));
- }
-
- @Test
- public void testByte() {
- final Byte flinkTuple = (byte) this.r.nextInt();
-
- final StormTuple<Byte> tuple = new StormTuple<Byte>(flinkTuple, null, -1, null, null, null);
- Assert.assertEquals(flinkTuple, tuple.getByte(0));
- }
-
- @Test
- public void testDouble() {
- final Double flinkTuple = this.r.nextDouble();
-
- final StormTuple<Double> tuple = new StormTuple<Double>(flinkTuple, null, -1, null, null,
- null);
- Assert.assertEquals(flinkTuple, tuple.getDouble(0));
- }
-
- @Test
- public void testFloat() {
- final Float flinkTuple = this.r.nextFloat();
-
- final StormTuple<Float> tuple = new StormTuple<Float>(flinkTuple, null, -1, null, null,
- null);
- Assert.assertEquals(flinkTuple, tuple.getFloat(0));
- }
-
- @Test
- public void testInteger() {
- final Integer flinkTuple = this.r.nextInt();
-
- final StormTuple<Integer> tuple = new StormTuple<Integer>(flinkTuple, null, -1, null, null,
- null);
- Assert.assertEquals(flinkTuple, tuple.getInteger(0));
- }
-
- @Test
- public void testLong() {
- final Long flinkTuple = this.r.nextLong();
-
- final StormTuple<Long> tuple = new StormTuple<Long>(flinkTuple, null, -1, null, null, null);
- Assert.assertEquals(flinkTuple, tuple.getLong(0));
- }
-
- @Test
- public void testShort() {
- final Short flinkTuple = (short) this.r.nextInt();
-
- final StormTuple<Short> tuple = new StormTuple<Short>(flinkTuple, null, -1, null, null,
- null);
- Assert.assertEquals(flinkTuple, tuple.getShort(0));
- }
-
- @Test
- public void testString() {
- final byte[] data = new byte[this.r.nextInt(15)];
- this.r.nextBytes(data);
- final String flinkTuple = new String(data, ConfigConstants.DEFAULT_CHARSET);
-
- final StormTuple<String> tuple = new StormTuple<String>(flinkTuple, null, -1, null, null,
- null);
- Assert.assertEquals(flinkTuple, tuple.getString(0));
- }
-
- @Test
- public void testBinaryTuple() {
- final byte[] data = new byte[this.r.nextInt(15)];
- this.r.nextBytes(data);
-
- final int index = this.r.nextInt(5);
- final Tuple flinkTuple = new Tuple5<Object, Object, Object, Object, Object>();
- flinkTuple.setField(data, index);
-
- final StormTuple<Tuple> tuple = new StormTuple<Tuple>(flinkTuple, null, -1, null, null,
- null);
- Assert.assertEquals(flinkTuple.getField(index), tuple.getBinary(index));
- }
-
- @Test
- public void testBooleanTuple() {
- final Boolean data = this.r.nextBoolean();
-
- final int index = this.r.nextInt(5);
- final Tuple flinkTuple = new Tuple5<Object, Object, Object, Object, Object>();
- flinkTuple.setField(data, index);
-
- final StormTuple<Tuple> tuple = new StormTuple<Tuple>(flinkTuple, null, -1, null, null,
- null);
- Assert.assertEquals(flinkTuple.getField(index), tuple.getBoolean(index));
- }
-
- @Test
- public void testByteTuple() {
- final Byte data = (byte) this.r.nextInt();
-
- final int index = this.r.nextInt(5);
- final Tuple flinkTuple = new Tuple5<Object, Object, Object, Object, Object>();
- flinkTuple.setField(data, index);
-
- final StormTuple<Tuple> tuple = new StormTuple<Tuple>(flinkTuple, null, -1, null, null,
- null);
- Assert.assertEquals(flinkTuple.getField(index), tuple.getByte(index));
- }
-
- @Test
- public void testDoubleTuple() {
- final Double data = this.r.nextDouble();
-
- final int index = this.r.nextInt(5);
- final Tuple flinkTuple = new Tuple5<Object, Object, Object, Object, Object>();
- flinkTuple.setField(data, index);
-
- final StormTuple<Tuple> tuple = new StormTuple<Tuple>(flinkTuple, null, -1, null, null,
- null);
- Assert.assertEquals(flinkTuple.getField(index), tuple.getDouble(index));
- }
-
- @Test
- public void testFloatTuple() {
- final Float data = this.r.nextFloat();
-
- final int index = this.r.nextInt(5);
- final Tuple flinkTuple = new Tuple5<Object, Object, Object, Object, Object>();
- flinkTuple.setField(data, index);
-
- final StormTuple<Tuple> tuple = new StormTuple<Tuple>(flinkTuple, null, -1, null, null,
- null);
- Assert.assertEquals(flinkTuple.getField(index), tuple.getFloat(index));
- }
-
- @Test
- public void testIntegerTuple() {
- final Integer data = this.r.nextInt();
-
- final int index = this.r.nextInt(5);
- final Tuple flinkTuple = new Tuple5<Object, Object, Object, Object, Object>();
- flinkTuple.setField(data, index);
-
- final StormTuple<Tuple> tuple = new StormTuple<Tuple>(flinkTuple, null, -1, null, null,
- null);
- Assert.assertEquals(flinkTuple.getField(index), tuple.getInteger(index));
- }
-
- @Test
- public void testLongTuple() {
- final Long data = this.r.nextLong();
-
- final int index = this.r.nextInt(5);
- final Tuple flinkTuple = new Tuple5<Object, Object, Object, Object, Object>();
- flinkTuple.setField(data, index);
-
- final StormTuple<Tuple> tuple = new StormTuple<Tuple>(flinkTuple, null, -1, null, null,
- null);
- Assert.assertEquals(flinkTuple.getField(index), tuple.getLong(index));
- }
-
- @Test
- public void testShortTuple() {
- final Short data = (short) this.r.nextInt();
-
- final int index = this.r.nextInt(5);
- final Tuple flinkTuple = new Tuple5<Object, Object, Object, Object, Object>();
- flinkTuple.setField(data, index);
-
- final StormTuple<Tuple> tuple = new StormTuple<Tuple>(flinkTuple, null, -1, null, null,
- null);
- Assert.assertEquals(flinkTuple.getField(index), tuple.getShort(index));
- }
-
- @Test
- public void testStringTuple() {
- final byte[] rawdata = new byte[this.r.nextInt(15)];
- this.r.nextBytes(rawdata);
- final String data = new String(rawdata, ConfigConstants.DEFAULT_CHARSET);
-
- final int index = this.r.nextInt(5);
- final Tuple flinkTuple = new Tuple5<Object, Object, Object, Object, Object>();
- flinkTuple.setField(data, index);
-
- final StormTuple<Tuple> tuple = new StormTuple<Tuple>(flinkTuple, null, -1, null, null,
- null);
- Assert.assertEquals(flinkTuple.getField(index), tuple.getString(index));
- }
-
- @Test
- public void testContains() throws Exception {
- Fields schema = new Fields("a1", "a2");
- StormTuple<Object> tuple = new StormTuple<Object>(Tuple.getTupleClass(1).newInstance(),
- schema, -1, null, null, null);
-
- Assert.assertTrue(tuple.contains("a1"));
- Assert.assertTrue(tuple.contains("a2"));
- Assert.assertFalse(tuple.contains("a3"));
- }
-
- @Test
- public void testGetFields() throws Exception {
- Fields schema = new Fields();
- Assert.assertSame(schema,
- new StormTuple<Object>(null, schema, -1, null, null, null).getFields());
- }
-
- @Test
- public void testFieldIndex() throws Exception {
- Fields schema = new Fields("a1", "a2");
- StormTuple<Object> tuple = new StormTuple<Object>(Tuple.getTupleClass(1).newInstance(),
- schema, -1, null, null, null);
-
- Assert.assertEquals(0, tuple.fieldIndex("a1"));
- Assert.assertEquals(1, tuple.fieldIndex("a2"));
- }
-
- @Test
- public void testSelect() throws Exception {
- Tuple tuple = Tuple.getTupleClass(arity).newInstance();
- Values values = new Values();
-
- ArrayList<String> attributeNames = new ArrayList<String>(arity);
- ArrayList<String> filterNames = new ArrayList<String>(arity);
-
- for (int i = 0; i < arity; ++i) {
- tuple.setField(i, i);
- attributeNames.add("a" + i);
-
- if (r.nextBoolean()) {
- filterNames.add("a" + i);
- values.add(i);
- }
- }
- Fields schema = new Fields(attributeNames);
- Fields selector = new Fields(filterNames);
-
- Assert.assertEquals(values,
- new StormTuple<>(tuple, schema, -1, null, null, null).select(selector));
- }
-
- @Test
- public void testGetValueByField() throws Exception {
- Object value = mock(Object.class);
- StormTuple<?> tuple = testGetByField(arity, index, value);
- Assert.assertSame(value, tuple.getValueByField(fieldName));
- }
-
- @Test
- public void testGetValueByFieldPojo() throws Exception {
- Object value = mock(Object.class);
- TestPojoMember<Object> pojo = new TestPojoMember<Object>(value);
- StormTuple<TestPojoMember<Object>> tuple = new StormTuple<TestPojoMember<Object>>(pojo,
- null, -1, null, null, null);
- Assert.assertSame(value, tuple.getValueByField(fieldNamePojo));
- }
-
- @Test
- public void testGetValueByFieldPojoGetter() throws Exception {
- Object value = mock(Object.class);
- TestPojoGetter<Object> pojo = new TestPojoGetter<Object>(value);
- StormTuple<TestPojoGetter<Object>> tuple = new StormTuple<TestPojoGetter<Object>>(pojo,
- null, -1, null, null, null);
- Assert.assertSame(value, tuple.getValueByField(fieldNamePojo));
- }
-
- @Test
- public void testGetStringByField() throws Exception {
- String value = "stringValue";
- StormTuple<?> tuple = testGetByField(arity, index, value);
- Assert.assertSame(value, tuple.getStringByField(fieldName));
- }
-
- @Test
- public void testGetStringByFieldPojo() throws Exception {
- String value = "stringValue";
- TestPojoMember<String> pojo = new TestPojoMember<String>(value);
- StormTuple<TestPojoMember<String>> tuple = new StormTuple<TestPojoMember<String>>(pojo,
- null, -1, null, null, null);
- Assert.assertSame(value, tuple.getStringByField(fieldNamePojo));
- }
-
- @Test
- public void testGetStringByFieldPojoGetter() throws Exception {
- String value = "stringValue";
- TestPojoGetter<String> pojo = new TestPojoGetter<String>(value);
- StormTuple<TestPojoGetter<String>> tuple = new StormTuple<TestPojoGetter<String>>(pojo,
- null, -1, null, null, null);
- Assert.assertSame(value, tuple.getStringByField(fieldNamePojo));
- }
-
- @Test
- public void testGetIntegerByField() throws Exception {
- Integer value = r.nextInt();
- StormTuple<?> tuple = testGetByField(arity, index, value);
- Assert.assertSame(value, tuple.getIntegerByField(fieldName));
- }
-
- @Test
- public void testGetIntegerByFieldPojo() throws Exception {
- Integer value = r.nextInt();
- TestPojoMember<Integer> pojo = new TestPojoMember<Integer>(value);
- StormTuple<TestPojoMember<Integer>> tuple = new StormTuple<TestPojoMember<Integer>>(pojo,
- null, -1, null, null, null);
- Assert.assertSame(value, tuple.getIntegerByField(fieldNamePojo));
- }
-
- @Test
- public void testGetIntegerByFieldPojoGetter() throws Exception {
- Integer value = r.nextInt();
- TestPojoGetter<Integer> pojo = new TestPojoGetter<Integer>(value);
- StormTuple<TestPojoGetter<Integer>> tuple = new StormTuple<TestPojoGetter<Integer>>(pojo,
- null, -1, null, null, null);
- Assert.assertSame(value, tuple.getIntegerByField(fieldNamePojo));
- }
-
- @Test
- public void testGetLongByField() throws Exception {
- Long value = r.nextLong();
- StormTuple<?> tuple = testGetByField(arity, index, value);
- Assert.assertSame(value, tuple.getLongByField(fieldName));
- }
-
- @Test
- public void testGetLongByFieldPojo() throws Exception {
- Long value = r.nextLong();
- TestPojoMember<Long> pojo = new TestPojoMember<Long>(value);
- StormTuple<TestPojoMember<Long>> tuple = new StormTuple<TestPojoMember<Long>>(pojo, null,
- -1, null, null, null);
- Assert.assertSame(value, tuple.getLongByField(fieldNamePojo));
- }
-
- @Test
- public void testGetLongByFieldPojoGetter() throws Exception {
- Long value = r.nextLong();
- TestPojoGetter<Long> pojo = new TestPojoGetter<Long>(value);
- StormTuple<TestPojoGetter<Long>> tuple = new StormTuple<TestPojoGetter<Long>>(pojo, null,
- -1, null, null, null);
- Assert.assertSame(value, tuple.getLongByField(fieldNamePojo));
- }
-
- @Test
- public void testGetBooleanByField() throws Exception {
- Boolean value = r.nextBoolean();
- StormTuple<?> tuple = testGetByField(arity, index, value);
- Assert.assertEquals(value, tuple.getBooleanByField(fieldName));
- }
-
- @Test
- public void testGetBooleanByFieldPojo() throws Exception {
- Boolean value = r.nextBoolean();
- TestPojoMember<Boolean> pojo = new TestPojoMember<Boolean>(value);
- StormTuple<TestPojoMember<Boolean>> tuple = new StormTuple<TestPojoMember<Boolean>>(pojo,
- null, -1, null, null, null);
- Assert.assertSame(value, tuple.getBooleanByField(fieldNamePojo));
- }
-
- @Test
- public void testGetBooleanByFieldPojoGetter() throws Exception {
- Boolean value = r.nextBoolean();
- TestPojoGetter<Boolean> pojo = new TestPojoGetter<Boolean>(value);
- StormTuple<TestPojoGetter<Boolean>> tuple = new StormTuple<TestPojoGetter<Boolean>>(pojo,
- null, -1, null, null, null);
- Assert.assertSame(value, tuple.getBooleanByField(fieldNamePojo));
- }
-
- @Test
- public void testGetShortByField() throws Exception {
- Short value = (short) r.nextInt();
- StormTuple<?> tuple = testGetByField(arity, index, value);
- Assert.assertSame(value, tuple.getShortByField(fieldName));
- }
-
- @Test
- public void testGetShortByFieldPojo() throws Exception {
- Short value = (short) r.nextInt();
- TestPojoMember<Short> pojo = new TestPojoMember<Short>(value);
- StormTuple<TestPojoMember<Short>> tuple = new StormTuple<TestPojoMember<Short>>(pojo, null,
- -1, null, null, null);
- Assert.assertSame(value, tuple.getShortByField(fieldNamePojo));
- }
-
- @Test
- public void testGetShortByFieldPojoGetter() throws Exception {
- Short value = (short) r.nextInt();
- TestPojoGetter<Short> pojo = new TestPojoGetter<Short>(value);
- StormTuple<TestPojoGetter<Short>> tuple = new StormTuple<TestPojoGetter<Short>>(pojo, null,
- -1, null, null, null);
- Assert.assertSame(value, tuple.getShortByField(fieldNamePojo));
- }
-
- @Test
- public void testGetByteByField() throws Exception {
- Byte value = new Byte((byte) r.nextInt());
- StormTuple<?> tuple = testGetByField(arity, index, value);
- Assert.assertSame(value, tuple.getByteByField(fieldName));
- }
-
- @Test
- public void testGetByteByFieldPojo() throws Exception {
- Byte value = new Byte((byte) r.nextInt());
- TestPojoMember<Byte> pojo = new TestPojoMember<Byte>(value);
- StormTuple<TestPojoMember<Byte>> tuple = new StormTuple<TestPojoMember<Byte>>(pojo, null,
- -1, null, null, null);
- Assert.assertSame(value, tuple.getByteByField(fieldNamePojo));
- }
-
- @Test
- public void testGetByteByFieldPojoGetter() throws Exception {
- Byte value = new Byte((byte) r.nextInt());
- TestPojoGetter<Byte> pojo = new TestPojoGetter<Byte>(value);
- StormTuple<TestPojoGetter<Byte>> tuple = new StormTuple<TestPojoGetter<Byte>>(pojo, null,
- -1, null, null, null);
- Assert.assertSame(value, tuple.getByteByField(fieldNamePojo));
- }
-
- @Test
- public void testGetDoubleByField() throws Exception {
- Double value = r.nextDouble();
- StormTuple<?> tuple = testGetByField(arity, index, value);
- Assert.assertSame(value, tuple.getDoubleByField(fieldName));
- }
-
- @Test
- public void testGetDoubleByFieldPojo() throws Exception {
- Double value = r.nextDouble();
- TestPojoMember<Double> pojo = new TestPojoMember<Double>(value);
- StormTuple<TestPojoMember<Double>> tuple = new StormTuple<TestPojoMember<Double>>(pojo,
- null, -1, null, null, null);
- Assert.assertSame(value, tuple.getDoubleByField(fieldNamePojo));
- }
-
- @Test
- public void testGetDoubleByFieldPojoGetter() throws Exception {
- Double value = r.nextDouble();
- TestPojoGetter<Double> pojo = new TestPojoGetter<Double>(value);
- StormTuple<TestPojoGetter<Double>> tuple = new StormTuple<TestPojoGetter<Double>>(pojo,
- null, -1, null, null, null);
- Assert.assertSame(value, tuple.getDoubleByField(fieldNamePojo));
- }
-
- @Test
- public void testGetFloatByField() throws Exception {
- Float value = r.nextFloat();
- StormTuple<?> tuple = testGetByField(arity, index, value);
- Assert.assertSame(value, tuple.getFloatByField(fieldName));
- }
-
- @Test
- public void testGetFloatByFieldPojo() throws Exception {
- Float value = r.nextFloat();
- TestPojoMember<Float> pojo = new TestPojoMember<Float>(value);
- StormTuple<TestPojoMember<Float>> tuple = new StormTuple<TestPojoMember<Float>>(pojo, null,
- -1, null, null, null);
- Assert.assertSame(value, tuple.getFloatByField(fieldNamePojo));
- }
-
- @Test
- public void testGetFloatByFieldPojoGetter() throws Exception {
- Float value = r.nextFloat();
- TestPojoGetter<Float> pojo = new TestPojoGetter<Float>(value);
- StormTuple<TestPojoGetter<Float>> tuple = new StormTuple<TestPojoGetter<Float>>(pojo, null,
- -1, null, null, null);
- Assert.assertSame(value, tuple.getFloatByField(fieldNamePojo));
- }
-
- @Test
- public void testGetBinaryByField() throws Exception {
- byte[] data = new byte[1 + r.nextInt(20)];
- r.nextBytes(data);
- StormTuple<?> tuple = testGetByField(arity, index, data);
- Assert.assertSame(data, tuple.getBinaryByField(fieldName));
- }
-
- @Test
- public void testGetBinaryFieldPojo() throws Exception {
- byte[] data = new byte[1 + r.nextInt(20)];
- r.nextBytes(data);
- TestPojoMember<byte[]> pojo = new TestPojoMember<byte[]>(data);
- StormTuple<TestPojoMember<byte[]>> tuple = new StormTuple<TestPojoMember<byte[]>>(pojo,
- null, -1, null, null, null);
- Assert.assertSame(data, tuple.getBinaryByField(fieldNamePojo));
- }
-
- @Test
- public void testGetBinaryByFieldPojoGetter() throws Exception {
- byte[] data = new byte[1 + r.nextInt(20)];
- r.nextBytes(data);
- TestPojoGetter<byte[]> pojo = new TestPojoGetter<byte[]>(data);
- StormTuple<TestPojoGetter<byte[]>> tuple = new StormTuple<TestPojoGetter<byte[]>>(pojo,
- null, -1, null, null, null);
- Assert.assertSame(data, tuple.getBinaryByField(fieldNamePojo));
- }
-
- private <T> StormTuple<?> testGetByField(int arity, int index, T value)
- throws Exception {
-
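- // builds a tuple of the given arity whose index-th attribute is named fieldName and holds the given value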
- assert (index < arity);
-
- Tuple tuple = Tuple.getTupleClass(arity).newInstance();
- tuple.setField(value, index);
-
- ArrayList<String> attributeNames = new ArrayList<String>(arity);
- for (int i = 0; i < arity; ++i) {
- if (i == index) {
- attributeNames.add(fieldName);
- } else {
- attributeNames.add("" + i);
- }
- }
- Fields schema = new Fields(attributeNames);
-
- return new StormTuple<>(tuple, schema, -1, null, null, null);
- }
-
- @Test
- public void testGetSourceGlobalStreamid() {
- GlobalStreamId globalStreamid = new StormTuple<>(null, null, -1, "streamId", "componentID",
- null).getSourceGlobalStreamid();
- Assert.assertEquals("streamId", globalStreamid.get_streamId());
- Assert.assertEquals("componentID", globalStreamid.get_componentId());
- }
-
- @Test
- public void testGetSourceComponent() {
- String sourceComponent = new StormTuple<>(null, null, -1, null, "componentID", null)
- .getSourceComponent();
- Assert.assertEquals("componentID", sourceComponent);
- }
-
- @Test
- public void testGetSourceTask() {
- int sourceTaskId = new StormTuple<>(null, null, 42, null, null, null).getSourceTask();
- Assert.assertEquals(42, sourceTaskId);
- }
-
- @Test
- public void testGetSourceStreamId() {
- String sourceStreamId = new StormTuple<>(null, null, -1, "streamId", null, null)
- .getSourceStreamId();
- Assert.assertEquals("streamId", sourceStreamId);
- }
-
- @Test
- public void testGetMessageId() {
- MessageId messageId = MessageId.makeUnanchored();
- StormTuple<?> stormTuple = new StormTuple<>(null, null, -1, null, null, messageId);
- Assert.assertSame(messageId, stormTuple.getMessageId());
- }
-
- private static class TestPojoMember<T> {
- public T member;
-
- public TestPojoMember(T value) {
- this.member = value;
- }
- }
-
- private static class TestPojoGetter<T> {
- private T member;
-
- public TestPojoGetter(T value) {
- this.member = value;
- }
-
- public T getMember() {
- return this.member;
- }
- }
-}
diff --git a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/TestContext.java b/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/TestContext.java
deleted file mode 100644
index 58aad7b..0000000
--- a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/TestContext.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wrappers;
-
-import org.apache.flink.api.java.tuple.Tuple1;
-import org.apache.flink.streaming.api.functions.source.SourceFunction.SourceContext;
-import org.apache.flink.streaming.api.watermark.Watermark;
-
-import java.util.LinkedList;
-
-class TestContext implements SourceContext<Tuple1<Integer>> {
- public LinkedList<Tuple1<Integer>> result = new LinkedList<Tuple1<Integer>>();
-
- @Override
- public void collect(final Tuple1<Integer> record) {
- this.result.add(record.copy());
- }
-
- @Override
- public void collectWithTimestamp(Tuple1<Integer> element, long timestamp) {
- this.result.add(element.copy());
- }
-
- @Override
- public void emitWatermark(Watermark mark) {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public void markAsTemporarilyIdle() {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public Object getCheckpointLock() {
- return null;
- }
-
- @Override
- public void close() {
-
- }
-}
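
TestContext above is a plain collecting stand-in for Flink's SourceContext: collect() and collectWithTimestamp() buffer copies of the emitted records, everything else is a no-op or unsupported. A minimal sketch of how such a context is typically used to unit-test a SourceFunction without starting a cluster (not part of the commit; names are illustrative, and it sits in the test's package so the package-private TestContext is visible):

package org.apache.flink.storm.wrappers;

import org.apache.flink.api.java.tuple.Tuple1;
import org.apache.flink.streaming.api.functions.source.SourceFunction;

public class CollectingContextSketch {
    public static void main(String[] args) throws Exception {
        SourceFunction<Tuple1<Integer>> source = new SourceFunction<Tuple1<Integer>>() {
            @Override
            public void run(SourceContext<Tuple1<Integer>> ctx) {
                for (int i = 0; i < 3; i++) {
                    ctx.collect(Tuple1.of(i)); // buffered in TestContext.result
                }
            }

            @Override
            public void cancel() {
            }
        };

        // Drive the source synchronously against the collecting context.
        TestContext ctx = new TestContext();
        source.run(ctx);
        if (ctx.result.size() != 3) {
            throw new AssertionError("expected 3 records, got " + ctx.result.size());
        }
    }
}
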
diff --git a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/WrapperSetupHelperTest.java b/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/WrapperSetupHelperTest.java
deleted file mode 100644
index 3118d6b..0000000
--- a/flink-contrib/flink-storm/src/test/java/org/apache/flink/storm/wrappers/WrapperSetupHelperTest.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.storm.wrappers;
-
-import org.apache.flink.storm.util.AbstractTest;
-
-import org.apache.storm.topology.IComponent;
-import org.apache.storm.topology.IRichBolt;
-import org.apache.storm.topology.IRichSpout;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.utils.Utils;
-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.powermock.api.mockito.PowerMockito;
-import org.powermock.core.classloader.annotations.PowerMockIgnore;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
-
-import java.util.HashMap;
-import java.util.HashSet;
-
-import static java.util.Collections.singleton;
-import static org.mockito.Mockito.mock;
-
-/**
- * Tests for the WrapperSetupHelper.
- */
-@RunWith(PowerMockRunner.class)
-@PrepareForTest(WrapperSetupHelper.class)
-@PowerMockIgnore({"javax.*", "org.apache.log4j.*"})
-public class WrapperSetupHelperTest extends AbstractTest {
-
-    @Test
-    public void testEmptyDeclarerBolt() {
-        IComponent boltOrSpout;
-
-        if (this.r.nextBoolean()) {
-            boltOrSpout = mock(IRichSpout.class);
-        } else {
-            boltOrSpout = mock(IRichBolt.class);
-        }
-
-        Assert.assertEquals(new HashMap<String, Integer>(),
-                WrapperSetupHelper.getNumberOfAttributes(boltOrSpout, null));
-    }
-
-    @Test(expected = IllegalArgumentException.class)
-    public void testRawType() throws Exception {
-        IComponent boltOrSpout;
-
-        if (this.r.nextBoolean()) {
-            boltOrSpout = mock(IRichSpout.class);
-        } else {
-            boltOrSpout = mock(IRichBolt.class);
-        }
-
-        final SetupOutputFieldsDeclarer declarer = new SetupOutputFieldsDeclarer();
-        declarer.declare(new Fields("dummy1", "dummy2"));
-        PowerMockito.whenNew(SetupOutputFieldsDeclarer.class).withNoArguments().thenReturn(declarer);
-
-        WrapperSetupHelper.getNumberOfAttributes(boltOrSpout,
-                new HashSet<String>(singleton(Utils.DEFAULT_STREAM_ID)));
-    }
-
-    @Test(expected = IllegalArgumentException.class)
-    public void testToManyAttributes() throws Exception {
-        IComponent boltOrSpout;
-
-        if (this.r.nextBoolean()) {
-            boltOrSpout = mock(IRichSpout.class);
-        } else {
-            boltOrSpout = mock(IRichBolt.class);
-        }
-
-        final SetupOutputFieldsDeclarer declarer = new SetupOutputFieldsDeclarer();
-        final String[] schema = new String[26];
-        for (int i = 0; i < schema.length; ++i) {
-            schema[i] = "a" + i;
-        }
-        declarer.declare(new Fields(schema));
-        PowerMockito.whenNew(SetupOutputFieldsDeclarer.class).withNoArguments().thenReturn(declarer);
-
-        WrapperSetupHelper.getNumberOfAttributes(boltOrSpout, null);
-    }
-
-    @Test
-    public void testTupleTypes() throws Exception {
-        for (int i = -1; i < 26; ++i) {
-            this.testTupleTypes(i);
-        }
-    }
-
-    private void testTupleTypes(final int numberOfAttributes) throws Exception {
-        String[] schema;
-        if (numberOfAttributes == -1) {
-            schema = new String[1];
-        } else {
-            schema = new String[numberOfAttributes];
-        }
-        for (int i = 0; i < schema.length; ++i) {
-            schema[i] = "a" + i;
-        }
-
-        IComponent boltOrSpout;
-        if (this.r.nextBoolean()) {
-            boltOrSpout = mock(IRichSpout.class);
-        } else {
-            boltOrSpout = mock(IRichBolt.class);
-        }
-
-        final SetupOutputFieldsDeclarer declarer = new SetupOutputFieldsDeclarer();
-        declarer.declare(new Fields(schema));
-        PowerMockito.whenNew(SetupOutputFieldsDeclarer.class).withNoArguments().thenReturn(declarer);
-
-        HashMap<String, Integer> attributes = new HashMap<String, Integer>();
-        attributes.put(Utils.DEFAULT_STREAM_ID, numberOfAttributes);
-
-        Assert.assertEquals(attributes, WrapperSetupHelper.getNumberOfAttributes(
-                boltOrSpout,
-                numberOfAttributes == -1 ? new HashSet<String>(singleton(Utils.DEFAULT_STREAM_ID)) : null));
-    }
-}
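
One idiom in the test above is easy to misread: PowerMockito.whenNew intercepts a constructor call made inside the class under test, which is why @PrepareForTest names WrapperSetupHelper (the caller) rather than SetupOutputFieldsDeclarer. A generic sketch of that idiom under the PowerMock/JUnit 4 API used here, with made-up class names:

import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

@RunWith(PowerMockRunner.class)
@PrepareForTest(WhenNewSketch.Caller.class) // prepare the caller, not the constructed class
public class WhenNewSketch {

    public static class Collaborator { }

    public static class Caller {
        public Collaborator create() {
            return new Collaborator(); // this call site is what whenNew rewrites
        }
    }

    @Test
    public void constructorIsIntercepted() throws Exception {
        Collaborator canned = new Collaborator();
        PowerMockito.whenNew(Collaborator.class).withNoArguments().thenReturn(canned);

        Assert.assertSame(canned, new Caller().create());
    }
}
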
diff --git a/flink-contrib/flink-storm/src/test/resources/log4j-test.properties b/flink-contrib/flink-storm/src/test/resources/log4j-test.properties
deleted file mode 100644
index 881dc06..0000000
--- a/flink-contrib/flink-storm/src/test/resources/log4j-test.properties
+++ /dev/null
@@ -1,27 +0,0 @@
-################################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-################################################################################
-
-# Set root logger level to OFF (silence test output) and its only appender to A1.
-log4j.rootLogger=OFF, A1
-
-# A1 is set to be a ConsoleAppender.
-log4j.appender.A1=org.apache.log4j.ConsoleAppender
-
-# A1 uses PatternLayout.
-log4j.appender.A1.layout=org.apache.log4j.PatternLayout
-log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
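
The deleted properties file turns test logging off entirely while still defining a console appender. A rough programmatic equivalent in the log4j 1.x API it configures (a sketch only, not from the commit; the class name is made up):

import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;

public class Log4jTestConfigSketch {
    public static void main(String[] args) {
        // A1: a console appender with the same conversion pattern as above.
        ConsoleAppender a1 = new ConsoleAppender(new PatternLayout("%-4r [%t] %-5p %c %x - %m%n"));

        Logger root = Logger.getRootLogger();
        root.addAppender(a1);
        root.setLevel(Level.OFF); // tests run silent; raise to DEBUG when chasing a failure

        root.info("never printed"); // suppressed at Level.OFF
    }
}
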
diff --git a/flink-contrib/pom.xml b/flink-contrib/pom.xml
index 7ea9b19..ad81ad6 100644
--- a/flink-contrib/pom.xml
+++ b/flink-contrib/pom.xml
@@ -37,8 +37,6 @@ under the License.
     <packaging>pom</packaging>
 
     <modules>
-        <module>flink-storm</module>
-        <module>flink-storm-examples</module>
         <module>flink-connector-wikiedits</module>
     </modules>
 
diff --git a/tools/travis/stage.sh b/tools/travis/stage.sh
index 53f6bee..1e43fe8 100644
--- a/tools/travis/stage.sh
+++ b/tools/travis/stage.sh
@@ -39,8 +39,6 @@ flink-streaming-java,\
 flink-streaming-scala"
 
 MODULES_LIBRARIES="\
-flink-contrib/flink-storm,\
-flink-contrib/flink-storm-examples,\
 flink-libraries/flink-cep,\
 flink-libraries/flink-cep-scala,\
 flink-libraries/flink-gelly,\