You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@streams.apache.org by sb...@apache.org on 2018/01/10 06:08:29 UTC

[streams] 01/01: STREAMS-574: Merge streams-examples into streams repo

This is an automated email from the ASF dual-hosted git repository.

sblackmon pushed a commit to branch STREAMS-574
in repository https://gitbox.apache.org/repos/asf/streams.git

commit 5edc80a2b9d248a66eaa16df0fbd9ad15122fdd8
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
AuthorDate: Wed Jan 10 00:08:14 2018 -0600

    STREAMS-574: Merge streams-examples into streams repo
    
    STREAMS-574: Merge streams-examples into streams repo (https://issues.apache.org/jira/browse/STREAMS-574)
---
 pom.xml                                            |   59 +-
 streams-examples/pom.xml                           |   42 +
 .../flink-twitter-collection/README.md             |    8 +
 .../flink-twitter-collection/pom.xml               |  467 +++++++++
 .../main/jsonschema/FlinkBatchConfiguration.json   |   12 +
 .../jsonschema/FlinkStreamingConfiguration.json    |   40 +
 .../main/jsonschema/StreamsFlinkConfiguration.json |   49 +
 .../TwitterFollowingPipelineConfiguration.json     |   29 +
 .../TwitterPostsPipelineConfiguration.json         |   29 +
 .../TwitterSpritzerPipelineConfiguration.json      |   29 +
 ...witterUserInformationPipelineConfiguration.json |   29 +
 .../resources/FlinkTwitterFollowingPipeline.dot    |   37 +
 .../main/resources/FlinkTwitterPostsPipeline.dot   |   37 +
 .../resources/FlinkTwitterSpritzerPipeline.dot     |   33 +
 .../FlinkTwitterUserInformationPipeline.dot        |   37 +
 .../apache/streams/examples/flink/FlinkBase.scala  |  221 +++++
 .../collection/FlinkTwitterFollowingPipeline.scala |  178 ++++
 .../collection/FlinkTwitterPostsPipeline.scala     |  175 ++++
 .../collection/FlinkTwitterSpritzerPipeline.scala  |  175 ++++
 .../FlinkTwitterUserInformationPipeline.scala      |  184 ++++
 .../site/markdown/FlinkTwitterFollowingPipeline.md |   52 +
 .../src/site/markdown/FlinkTwitterPostsPipeline.md |   48 +
 .../site/markdown/FlinkTwitterSpritzerPipeline.md  |   48 +
 .../FlinkTwitterUserInformationPipeline.md         |   48 +
 .../src/site/markdown/index.md                     |   32 +
 .../flink-twitter-collection/src/site/site.xml     |   28 +
 .../src/test/resources/1000twitterids.txt          | 1000 ++++++++++++++++++++
 .../FlinkTwitterFollowingPipelineFollowersIT.conf  |   36 +
 .../FlinkTwitterFollowingPipelineFriendsIT.conf    |   35 +
 .../resources/FlinkTwitterPostsPipelineIT.conf     |   31 +
 .../resources/FlinkTwitterSpritzerPipelineIT.conf  |   31 +
 .../FlinkTwitterUserInformationPipelineIT.conf     |   31 +
 .../src/test/resources/asf.txt                     |    1 +
 .../FlinkTwitterFollowingPipelineFollowersIT.scala |   73 ++
 .../FlinkTwitterFollowingPipelineFriendsIT.scala   |   73 ++
 .../twitter/test/FlinkTwitterPostsPipelineIT.scala |   73 ++
 .../test/FlinkTwitterSpritzerPipelineIT.scala      |   74 ++
 .../FlinkTwitterUserInformationPipelineIT.scala    |   73 ++
 streams-examples/streams-examples-flink/pom.xml    |   44 +
 .../elasticsearch-hdfs/README.md                   |    8 +
 .../elasticsearch-hdfs/pom.xml                     |  322 +++++++
 .../apache/streams/example/ElasticsearchHdfs.java  |   73 ++
 .../apache/streams/example/HdfsElasticsearch.java  |   73 ++
 .../jsonschema/ElasticsearchHdfsConfiguration.json |   13 +
 .../jsonschema/HdfsElasticsearchConfiguration.json |   13 +
 .../src/main/resources/ElasticsearchHdfs.dot       |   35 +
 .../src/main/resources/HdfsElasticsearch.dot       |   35 +
 .../src/site/markdown/ElasticsearchHdfs.md         |   49 +
 .../src/site/markdown/HdfsElasticsearch.md         |   51 +
 .../elasticsearch-hdfs/src/site/markdown/index.md  |   32 +
 .../elasticsearch-hdfs/src/site/site.xml           |   25 +
 .../streams/example/test/ElasticsearchHdfsIT.java  |  102 ++
 .../streams/example/test/HdfsElasticsearchIT.java  |  109 +++
 .../src/test/resources/ElasticsearchHdfsIT.conf    |   31 +
 .../src/test/resources/HdfsElasticsearchIT.conf    |   33 +
 .../src/test/resources/log4j.properties            |   26 +
 .../src/test/resources/logback.xml                 |   46 +
 .../src/test/resources/testBackup.json             |   26 +
 .../src/test/resources/testRestore.json            |   22 +
 .../src/test/resources/testng.xml                  |   36 +
 .../elasticsearch-reindex/README.md                |    8 +
 .../elasticsearch-reindex/pom.xml                  |  278 ++++++
 .../streams/example/ElasticsearchReindex.java      |   78 ++
 .../ElasticsearchReindexConfiguration.json         |   13 +
 .../src/main/resources/ElasticsearchReindex.dot    |   35 +
 .../src/main/resources/application.json            |   29 +
 .../src/main/resources/log4j.properties            |   26 +
 .../src/main/resources/logback.xml                 |   46 +
 .../src/site/markdown/ElasticsearchReindex.md      |   49 +
 .../src/site/markdown/index.md                     |   32 +
 .../elasticsearch-reindex/src/site/site.xml        |   25 +
 .../example/test/ElasticsearchReindexChildIT.java  |  112 +++
 .../example/test/ElasticsearchReindexIT.java       |  111 +++
 .../example/test/ElasticsearchReindexParentIT.java |  125 +++
 .../resources/ElasticsearchReindexChildIT.conf     |   37 +
 .../src/test/resources/ElasticsearchReindexIT.conf |   36 +
 .../resources/ElasticsearchReindexParentIT.conf    |   37 +
 .../src/test/resources/testng.xml                  |   42 +
 .../mongo-elasticsearch-sync/README.md             |    8 +
 .../mongo-elasticsearch-sync/pom.xml               |  282 ++++++
 .../streams/example/MongoElasticsearchSync.java    |   77 ++
 .../MongoElasticsearchSyncConfiguration.json       |   13 +
 .../src/main/resources/MongoElasticsearchSync.dot  |   36 +
 .../src/site/markdown/MongoElasticsearchSync.md    |   50 +
 .../src/site/markdown/index.md                     |   31 +
 .../mongo-elasticsearch-sync/src/site/site.xml     |   26 +
 .../example/test/MongoElasticsearchSyncIT.java     |  104 ++
 .../test/resources/MongoElasticsearchSyncIT.conf   |   35 +
 .../src/test/resources/testng.xml                  |   30 +
 streams-examples/streams-examples-local/pom.xml    |   49 +
 .../twitter-follow-neo4j/README.md                 |    8 +
 .../twitter-follow-neo4j/pom.xml                   |  256 +++++
 .../apache/streams/example/TwitterFollowNeo4j.java |  104 ++
 .../TwitterFollowNeo4jConfiguration.json           |   13 +
 .../src/main/resources/TwitterFollowNeo4j.dot      |   39 +
 .../src/site/markdown/TwitterFollowNeo4j.md        |   45 +
 .../src/site/markdown/index.md                     |   31 +
 .../twitter-follow-neo4j/src/site/site.xml         |   28 +
 .../streams/example/test/TwitterFollowNeo4jIT.java |  119 +++
 .../src/test/resources/TwitterFollowNeo4jIT.conf   |   32 +
 .../twitter-history-elasticsearch/README.md        |    8 +
 .../twitter-history-elasticsearch/pom.xml          |  284 ++++++
 .../example/TwitterHistoryElasticsearch.java       |   84 ++
 .../TwitterHistoryElasticsearchConfiguration.json  |   13 +
 .../main/resources/TwitterHistoryElasticsearch.dot |   39 +
 .../site/markdown/TwitterHistoryElasticsearch.md   |   53 ++
 .../src/site/markdown/index.md                     |   42 +
 .../src/site/site.xml                              |   28 +
 .../test/TwitterHistoryElasticsearchIT.java        |  104 ++
 .../resources/TwitterHistoryElasticsearchIT.conf   |   31 +
 .../twitter-userstream-elasticsearch/README.md     |    8 +
 .../twitter-userstream-elasticsearch/pom.xml       |  237 +++++
 .../example/TwitterUserstreamElasticsearch.java    |  147 +++
 ...witterUserstreamElasticsearchConfiguration.json |   13 +
 .../resources/TwitterUserstreamElasticsearch.dot   |   46 +
 .../markdown/TwitterUserstreamElasticsearch.md     |   43 +
 .../src/site/markdown/index.md                     |   31 +
 .../src/site/site.xml                              |   28 +
 .../test/TwitterUserstreamElasticsearchIT.java     |  106 +++
 .../TwitterUserstreamElasticsearchIT.conf          |   32 +
 120 files changed, 8772 insertions(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index 32f415d..936995a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -346,7 +346,7 @@
         <scalastyle.plugin.version>0.8.0</scalastyle.plugin.version>
         <scm.plugin.version>1.9.5</scm.plugin.version>
         <scmpublish.plugin.version>1.1</scmpublish.plugin.version>
-        <shade.plugin.version>2.4.3</shade.plugin.version>
+        <shade.plugin.version>3.1.0</shade.plugin.version>
         <site.plugin.version>3.6</site.plugin.version>
         <source.plugin.version>2.4</source.plugin.version>
         <surefire.plugin.version>2.19.1</surefire.plugin.version>
@@ -408,6 +408,7 @@
         <module>streams-config</module>
         <module>streams-contrib</module>
         <module>streams-components</module>
+        <module>streams-examples</module>
         <module>streams-monitoring</module>
         <module>streams-pojo</module>
         <module>streams-pojo-extensions</module>
@@ -478,6 +479,22 @@
                 <plugin>
                     <artifactId>maven-clean-plugin</artifactId>
                     <version>${clean.plugin.version}</version>
+                    <configuration>
+                        <filesets>
+                            <fileset>
+                                <directory>src/site/resources</directory>
+                                <followSymlinks>false</followSymlinks>
+                            </fileset>
+                            <!-- this is here because elasticsearch integration tests don't have a setting to change directory where temp index files get created -->
+                            <fileset>
+                                <directory>data</directory>
+                                <followSymlinks>false</followSymlinks>
+                            </fileset>
+                            <fileset>
+                                <directory>dist</directory>
+                            </fileset>
+                        </filesets>
+                    </configuration>
                 </plugin>
                 <plugin>
                     <artifactId>maven-compiler-plugin</artifactId>
@@ -1354,6 +1371,46 @@
             </build>
         </profile>
         <profile>
+            <id>dist</id>
+            <activation>
+                <activeByDefault>false</activeByDefault>
+            </activation>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-shade-plugin</artifactId>
+                        <version>${shade.plugin.version}</version>
+                        <configuration>
+                            <outputDirectory>dist</outputDirectory>
+                            <outputFile>dist/${project.artifactId}-jar-with-dependencies.jar</outputFile>
+                            <filters>
+                                <filter>
+                                    <artifact>*:*</artifact>
+                                    <excludes>
+                                        <exclude>**/META-INF/**</exclude>
+                                    </excludes>
+                                </filter>
+                            </filters>
+                            <transformers>
+                                <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
+                                    <resource>reference.conf</resource>
+                                </transformer>
+                            </transformers>
+                        </configuration>
+                        <executions>
+                            <execution>
+                                <phase>package</phase>
+                                <goals>
+                                    <goal>shade</goal>
+                                </goals>
+                            </execution>
+                        </executions>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+        <profile>
             <id>apache-release</id>
             <properties>
                 <skipTests>false</skipTests>
diff --git a/streams-examples/pom.xml b/streams-examples/pom.xml
new file mode 100644
index 0000000..27990fb
--- /dev/null
+++ b/streams-examples/pom.xml
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <artifactId>apache-streams</artifactId>
+        <groupId>org.apache.streams</groupId>
+        <version>0.5.2-SNAPSHOT</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+
+    <groupId>org.apache.streams.examples</groupId>
+    <artifactId>streams-examples</artifactId>
+    <version>0.5.2-SNAPSHOT</version>
+
+    <packaging>pom</packaging>
+    <name>streams-examples</name>
+
+    <modules>
+        <module>streams-examples-flink</module>
+        <module>streams-examples-local</module>
+    </modules>
+
+</project>
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/README.md b/streams-examples/streams-examples-flink/flink-twitter-collection/README.md
new file mode 100644
index 0000000..f9fe687
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/README.md
@@ -0,0 +1,8 @@
+Apache Streams (incubating)
+Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
+--------------------------------------------------------------------------------
+
+org.apache.streams:flink-twitter-collection
+===========================================
+
+[README.md](src/site/markdown/index.md "README")
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/pom.xml b/streams-examples/streams-examples-flink/flink-twitter-collection/pom.xml
new file mode 100644
index 0000000..25a8004
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/pom.xml
@@ -0,0 +1,467 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <groupId>org.apache.streams.examples</groupId>
+        <artifactId>streams-examples-flink</artifactId>
+        <version>0.5.2-SNAPSHOT</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>streams-flink-twitter-collection</artifactId>
+    <name>flink-twitter-collection</name>
+
+    <description>Collects twitter documents using flink.</description>
+
+    <properties>
+        <testng.version>6.9.10</testng.version>
+        <hdfs.version>2.7.0</hdfs.version>
+        <flink.version>1.2.0</flink.version>
+        <scala.version>2.10.6</scala.version>
+        <scalatest.version>2.2.5</scalatest.version>
+        <scala.suffix>2.10</scala.suffix>
+        <scala-maven.plugin.version>3.2.2</scala-maven.plugin.version>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.scala-lang</groupId>
+            <artifactId>scala-compiler</artifactId>
+            <version>${scala.version}</version>
+            <scope>compile</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.scala-lang</groupId>
+            <artifactId>scala-library</artifactId>
+            <version>${scala.version}</version>
+            <scope>compile</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.scala-lang</groupId>
+            <artifactId>scala-reflect</artifactId>
+            <version>${scala.version}</version>
+            <scope>compile</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.scalatest</groupId>
+            <artifactId>scalatest_${scala.suffix}</artifactId>
+            <version>${scalatest.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-core</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.typesafe</groupId>
+            <artifactId>config</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-config</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-util</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-pojo</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-provider-twitter</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-persist-hdfs</artifactId>
+            <version>${project.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>commons-logging</groupId>
+                    <artifactId>commons-logging</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-hdfs</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-hdfs</artifactId>
+            <version>${hdfs.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>commons-logging</groupId>
+                    <artifactId>commons-logging</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-java</artifactId>
+            <version>${flink.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>commons-logging</groupId>
+                    <artifactId>commons-logging</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.zookeeper</groupId>
+                    <artifactId>*</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-clients_${scala.suffix}</artifactId>
+            <version>${flink.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>commons-logging</groupId>
+                    <artifactId>commons-logging</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.zookeeper</groupId>
+                    <artifactId>*</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-scala_${scala.suffix}</artifactId>
+            <version>${flink.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>commons-logging</groupId>
+                    <artifactId>commons-logging</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.zookeeper</groupId>
+                    <artifactId>*</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-connector-filesystem_2.10</artifactId>
+            <version>${flink.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>commons-logging</groupId>
+                    <artifactId>commons-logging</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-streaming-scala_${scala.suffix}</artifactId>
+            <version>${flink.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>commons-logging</groupId>
+                    <artifactId>commons-logging</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-metrics-core</artifactId>
+            <version>${flink.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>commons-logging</groupId>
+                    <artifactId>commons-logging</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.zookeeper</groupId>
+            <artifactId>zookeeper</artifactId>
+            <exclusions>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>commons-logging</groupId>
+                    <artifactId>commons-logging</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>log4j-over-slf4j</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>jcl-over-slf4j</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>jul-to-slf4j</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-classic</artifactId>
+            <version>${logback.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-core</artifactId>
+            <version>${logback.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-testing</artifactId>
+            <version>${project.version}</version>
+            <scope>test</scope>
+            <type>test-jar</type>
+        </dependency>
+        <dependency>
+            <groupId>org.testng</groupId>
+            <artifactId>testng</artifactId>
+            <version>${testng.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-all</artifactId>
+            <version>1.3</version>
+            <scope>test</scope>
+        </dependency>
+
+    </dependencies>
+
+    <build>
+        <sourceDirectory>src/main/scala</sourceDirectory>
+        <testSourceDirectory>src/test/scala</testSourceDirectory>
+        <resources>
+            <resource>
+                <directory>src/main/resources</directory>
+            </resource>
+        </resources>
+        <testResources>
+            <testResource>
+                <directory>src/test/resources</directory>
+            </testResource>
+        </testResources>
+        <plugins>
+            <!-- This binary runs with logback -->
+            <!-- Keep log4j out -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-enforcer-plugin</artifactId>
+                <version>1.3.1</version>
+                <executions>
+                    <execution>
+                        <id>enforce-banned-dependencies</id>
+                        <goals>
+                            <goal>enforce</goal>
+                        </goals>
+                        <configuration>
+                            <rules>
+                                <bannedDependencies>
+                                    <excludes>
+                                        <exclude>org.slf4j:slf4j-log4j12</exclude>
+                                        <exclude>org.slf4j:slf4j-jcl</exclude>
+                                        <exclude>org.slf4j:slf4j-jdk14</exclude>
+                                        <exclude>org.log4j:log4j</exclude>
+                                        <exclude>commons-logging:commons-logging</exclude>
+                                    </excludes>
+                                </bannedDependencies>
+                            </rules>
+                            <fail>true</fail>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>net.alchim31.maven</groupId>
+                <artifactId>scala-maven-plugin</artifactId>
+                <version>${scala-maven.plugin.version}</version>
+                <executions>
+                    <execution>
+                        <id>scala-compile-first</id>
+                        <phase>process-resources</phase>
+                        <goals>
+                            <goal>add-source</goal>
+                            <goal>compile</goal>
+                        </goals>
+                    </execution>
+                    <execution>
+                        <id>scala-test-compile</id>
+                        <phase>process-test-resources</phase>
+                        <goals>
+                            <goal>testCompile</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.streams.plugins</groupId>
+                <artifactId>streams-plugin-pojo</artifactId>
+                <configuration>
+                    <sourcePaths>
+                        <sourcePath>${project.basedir}/src/main/jsonschema</sourcePath>
+                    </sourcePaths>
+                    <targetDirectory>${project.basedir}/target/generated-sources/pojo</targetDirectory>
+                    <targetPackage>org.apache.streams.examples.flink.twitter</targetPackage>
+                </configuration>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>generate-sources</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.apache.streams</groupId>
+                        <artifactId>streams-persist-hdfs</artifactId>
+                        <version>${project.version}</version>
+                    </dependency>
+                    <dependency>
+                        <groupId>org.apache.streams</groupId>
+                        <artifactId>streams-provider-twitter</artifactId>
+                        <version>${project.version}</version>
+                    </dependency>
+                </dependencies>
+            </plugin>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>build-helper-maven-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>add-source</id>
+                        <phase>generate-sources</phase>
+                        <goals>
+                            <goal>add-source</goal>
+                        </goals>
+                        <configuration>
+                            <sources>
+                                <source>target/generated-sources/pojo</source>
+                            </sources>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-failsafe-plugin</artifactId>
+                <configuration>
+                    <!-- Run integration test suite rather than individual tests. -->
+                    <excludes>
+                        <exclude>**/*Test.java</exclude>
+                        <exclude>**/*Tests.java</exclude>
+                    </excludes>
+                    <includes>
+                        <include>**/*IT.java</include>
+                        <include>**/*ITs.java</include>
+                    </includes>
+                </configuration>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.apache.maven.surefire</groupId>
+                        <artifactId>surefire-testng</artifactId>
+                        <version>${failsafe.plugin.version}</version>
+                    </dependency>
+                </dependencies>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.rat</groupId>
+                <artifactId>apache-rat-plugin</artifactId>
+                <configuration>
+                    <excludes combine.children="append">
+                        <exclude>src/test/resources/1000twitterids.txt</exclude>
+                        <exclude>src/test/resources/asf.txt</exclude>
+                    </excludes>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/FlinkBatchConfiguration.json b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/FlinkBatchConfiguration.json
new file mode 100644
index 0000000..30a2942
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/FlinkBatchConfiguration.json
@@ -0,0 +1,12 @@
+{
+  "$schema": "http://json-schema.org/draft-03/schema",
+  "$license": [
+    "http://www.apache.org/licenses/LICENSE-2.0"
+  ],
+  "type": "object",
+  "javaType" : "org.apache.streams.flink.FlinkBatchConfiguration",
+  "javaInterfaces": ["java.io.Serializable"],
+  "extends": {
+    "$ref": "StreamsFlinkConfiguration.json"
+  }
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/FlinkStreamingConfiguration.json b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/FlinkStreamingConfiguration.json
new file mode 100644
index 0000000..0d63f4e
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/FlinkStreamingConfiguration.json
@@ -0,0 +1,40 @@
+{
+  "$schema": "http://json-schema.org/draft-03/schema",
+  "$license": [
+    "http://www.apache.org/licenses/LICENSE-2.0"
+  ],
+  "type": "object",
+  "javaType" : "org.apache.streams.flink.FlinkStreamingConfiguration",
+  "javaInterfaces": ["java.io.Serializable"],
+  "extends": {
+    "$ref": "StreamsFlinkConfiguration.json"
+  },
+  "properties": {
+    "parallel": {
+      "type": "integer",
+      "default": 1
+    },
+    "providerWaitMs": {
+      "type": "integer",
+      "default": 1000
+    },
+    "checkpointIntervalMs": {
+      "type": "integer",
+      "default": 300000
+    },
+    "checkpointTimeoutMs": {
+      "type": "integer",
+      "default": 30000
+    },
+    "restartAttempts": {
+      "type": "integer",
+      "description": "number of restart attempts",
+      "default": 3
+    },
+    "restartDelayMs": {
+      "type": "integer",
+      "description": "delay in milliseconds",
+      "default": 10000
+    }
+  }
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/StreamsFlinkConfiguration.json b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/StreamsFlinkConfiguration.json
new file mode 100644
index 0000000..6d199b6
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/StreamsFlinkConfiguration.json
@@ -0,0 +1,49 @@
+{
+  "$schema": "http://json-schema.org/draft-03/schema",
+  "$license": [
+    "http://www.apache.org/licenses/LICENSE-2.0"
+  ],
+  "type": "object",
+  "javaType" : "org.apache.streams.flink.StreamsFlinkConfiguration",
+  "javaInterfaces": ["java.io.Serializable"],
+  "extends": {
+    "$comment": "Absolute $ref: http://streams.apache.org/streams-config/StreamsConfiguration.json",
+    "$ref": "../../../../../../streams-config/src/main/jsonschema/StreamsConfiguration.json"
+  },
+  "properties": {
+    "parallel": {
+      "type": "integer",
+      "default": 1
+    },
+    "providerWaitMs": {
+      "type": "integer",
+      "default": 1000
+    },
+    "checkpointIntervalMs": {
+      "type": "integer",
+      "default": 300000
+    },
+    "checkpointTimeoutMs": {
+      "type": "integer",
+      "default": 30000
+    },
+    "test": {
+      "type": "boolean",
+      "default": false
+    },
+    "local": {
+      "type": "boolean",
+      "default": true
+    },
+    "restartAttempts": {
+      "type": "integer",
+      "description": "number of restart attempts",
+      "default": 3
+    },
+    "restartDelayMs": {
+      "type": "integer",
+      "description": "delay in milliseconds",
+      "default": 10000
+    }
+  }
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/TwitterFollowingPipelineConfiguration.json b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/TwitterFollowingPipelineConfiguration.json
new file mode 100644
index 0000000..de4f9bb
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/TwitterFollowingPipelineConfiguration.json
@@ -0,0 +1,29 @@
+{
+  "$schema": "http://json-schema.org/draft-03/schema",
+  "$license": [
+    "http://www.apache.org/licenses/LICENSE-2.0"
+  ],
+  "type": "object",
+  "javaType" : "org.apache.streams.examples.flink.twitter.TwitterFollowingPipelineConfiguration",
+  "javaInterfaces": ["java.io.Serializable"],
+  "extends": {
+    "$ref": "FlinkStreamingConfiguration.json"
+  },
+  "properties": {
+    "twitter": {
+      "type": "object",
+      "javaType": "org.apache.streams.twitter.TwitterFollowingConfiguration"
+    },
+    "source": {
+      "type": "object",
+      "javaType": "org.apache.streams.hdfs.HdfsReaderConfiguration"
+    },
+    "destination": {
+      "type": "object",
+      "javaType": "org.apache.streams.hdfs.HdfsWriterConfiguration"
+    },
+    "providerWaitMs": {
+      "type": "integer"
+    }
+  }
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/TwitterPostsPipelineConfiguration.json b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/TwitterPostsPipelineConfiguration.json
new file mode 100644
index 0000000..e994ad5
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/TwitterPostsPipelineConfiguration.json
@@ -0,0 +1,29 @@
+{
+  "$schema": "http://json-schema.org/draft-03/schema",
+  "$license": [
+    "http://www.apache.org/licenses/LICENSE-2.0"
+  ],
+  "type": "object",
+  "javaType" : "org.apache.streams.examples.flink.twitter.TwitterPostsPipelineConfiguration",
+  "javaInterfaces": ["java.io.Serializable"],
+  "extends": {
+    "$ref": "FlinkStreamingConfiguration.json"
+  },
+  "properties": {
+    "twitter": {
+      "type": "object",
+      "javaType": "org.apache.streams.twitter.TwitterTimelineProviderConfiguration"
+    },
+    "source": {
+      "type": "object",
+      "javaType": "org.apache.streams.hdfs.HdfsReaderConfiguration"
+    },
+    "destination": {
+      "type": "object",
+      "javaType": "org.apache.streams.hdfs.HdfsWriterConfiguration"
+    },
+    "providerWaitMs": {
+      "type": "integer"
+    }
+  }
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/TwitterSpritzerPipelineConfiguration.json b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/TwitterSpritzerPipelineConfiguration.json
new file mode 100644
index 0000000..49d0d1e
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/TwitterSpritzerPipelineConfiguration.json
@@ -0,0 +1,29 @@
+{
+  "$schema": "http://json-schema.org/draft-03/schema",
+  "$license": [
+    "http://www.apache.org/licenses/LICENSE-2.0"
+  ],
+  "type": "object",
+  "javaType" : "org.apache.streams.examples.flink.twitter.TwitterSpritzerPipelineConfiguration",
+  "javaInterfaces": ["java.io.Serializable"],
+  "extends": {
+    "$ref": "FlinkStreamingConfiguration.json"
+  },
+  "properties": {
+    "twitter": {
+      "type": "object",
+      "javaType": "org.apache.streams.twitter.TwitterStreamConfiguration"
+    },
+    "source": {
+      "type": "object",
+      "javaType": "org.apache.streams.hdfs.HdfsReaderConfiguration"
+    },
+    "destination": {
+      "type": "object",
+      "javaType": "org.apache.streams.hdfs.HdfsWriterConfiguration"
+    },
+    "providerWaitMs": {
+      "type": "integer"
+    }
+  }
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/TwitterUserInformationPipelineConfiguration.json b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/TwitterUserInformationPipelineConfiguration.json
new file mode 100644
index 0000000..5261748
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/jsonschema/TwitterUserInformationPipelineConfiguration.json
@@ -0,0 +1,29 @@
+{
+  "$schema": "http://json-schema.org/draft-03/schema",
+  "$license": [
+    "http://www.apache.org/licenses/LICENSE-2.0"
+  ],
+  "type": "object",
+  "javaType" : "org.apache.streams.examples.flink.twitter.TwitterUserInformationPipelineConfiguration",
+  "javaInterfaces": ["java.io.Serializable"],
+  "extends": {
+    "$ref": "FlinkStreamingConfiguration.json"
+  },
+  "properties": {
+    "twitter": {
+      "type": "object",
+      "javaType": "org.apache.streams.twitter.TwitterUserInformationConfiguration"
+    },
+    "source": {
+      "type": "object",
+      "javaType": "org.apache.streams.hdfs.HdfsReaderConfiguration"
+    },
+    "destination": {
+      "type": "object",
+      "javaType": "org.apache.streams.hdfs.HdfsWriterConfiguration"
+    },
+    "providerWaitMs": {
+      "type": "integer"
+    }
+  }
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/resources/FlinkTwitterFollowingPipeline.dot b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/resources/FlinkTwitterFollowingPipeline.dot
new file mode 100644
index 0000000..1d0f188
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/resources/FlinkTwitterFollowingPipeline.dot
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+ digraph g {
+
+  //source
+  source [label="source\nhdfs://${host}:${port}/${path}/${readerPath}",shape=tab];
+
+  //providers
+  TwitterFollowingProvider [label="TwitterFollowingProvider",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterFollowingProvider.java"];
+
+  //persisters
+  RollingFileSink [label="RollingFileSink",shape=ellipse];
+  
+   //data
+  destination [label="destination\nhdfs://${host}:${port}/${path}/${writerPath}",shape=tab];
+
+  //stream
+  TwitterFollowingProvider -> source [dir=back,style=dashed];
+  TwitterFollowingProvider -> RollingFileSink [label="String"];
+  RollingFileSink -> destination;
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/resources/FlinkTwitterPostsPipeline.dot b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/resources/FlinkTwitterPostsPipeline.dot
new file mode 100644
index 0000000..d9cbb93
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/resources/FlinkTwitterPostsPipeline.dot
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+ digraph g {
+
+  //source
+  source [label="source\nhdfs://${host}:${port}/${path}/${readerPath}",shape=tab];
+
+  //providers
+  TwitterTimelineProvider [label="TwitterTimelineProvider",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterTimelineProvider.java"];
+
+  //persisters
+  RollingFileSink [label="RollingFileSink",shape=ellipse];
+  
+   //data
+  destination [label="destination\nhdfs://${host}:${port}/${path}/${writerPath}",shape=tab];
+
+  //stream
+  TwitterTimelineProvider -> source [dir=back,style=dashed];
+  TwitterTimelineProvider -> RollingFileSink [label="String"];
+  RollingFileSink -> destination;
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/resources/FlinkTwitterSpritzerPipeline.dot b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/resources/FlinkTwitterSpritzerPipeline.dot
new file mode 100644
index 0000000..1d879a7
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/resources/FlinkTwitterSpritzerPipeline.dot
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+ digraph g {
+
+  //providers
+  TwitterStreamProvider [label="TwitterStreamProvider",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterStreamProvider.java"];
+
+  //persisters
+  RollingFileSink [label="RollingFileSink",shape=ellipse];
+  
+   //data
+  destination [label="hdfs://${host}:${port}/${path}/${writerPath}",shape=box];
+
+  //stream
+  TwitterStreamProvider -> RollingFileSink [label="String"];
+  RollingFileSink -> destination;
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/resources/FlinkTwitterUserInformationPipeline.dot b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/resources/FlinkTwitterUserInformationPipeline.dot
new file mode 100644
index 0000000..b203193
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/resources/FlinkTwitterUserInformationPipeline.dot
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+ digraph g {
+
+  //source
+  source [label="source\nhdfs://${host}:${port}/${path}/${readerPath}",shape=tab];
+
+  //providers
+  TwitterUserInformationProvider [label="TwitterUserInformationProvider",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterUserInformationProvider.java"];
+
+  //persisters
+  RollingFileSink [label="RollingFileSink",shape=ellipse];
+  
+   //data
+  destination [label="destination\nhdfs://${host}:${port}/${path}/${writerPath}",shape=tab];
+
+  //stream
+  TwitterUserInformationProvider -> source [dir=back,style=dashed];
+  TwitterUserInformationProvider -> RollingFileSink [label="String"];
+  RollingFileSink -> destination;
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/scala/org/apache/streams/examples/flink/FlinkBase.scala b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/scala/org/apache/streams/examples/flink/FlinkBase.scala
new file mode 100644
index 0000000..96e2de5
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/scala/org/apache/streams/examples/flink/FlinkBase.scala
@@ -0,0 +1,221 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.examples.flink
+
+import java.net.MalformedURLException
+
+import com.typesafe.config.Config
+import org.apache.commons.lang3.StringUtils
+import org.apache.flink.api.common.restartstrategy.RestartStrategies
+import org.apache.flink.api.scala.ExecutionEnvironment
+import org.apache.flink.streaming.api.CheckpointingMode
+import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
+import org.apache.streams.config.{ComponentConfigurator, StreamsConfigurator}
+import org.apache.streams.flink.{FlinkBatchConfiguration, FlinkStreamingConfiguration, StreamsFlinkConfiguration}
+import org.apache.streams.hdfs.{HdfsConfiguration, HdfsReaderConfiguration, HdfsWriterConfiguration}
+import org.apache.streams.jackson.StreamsJacksonMapper
+import org.slf4j.LoggerFactory
+
+/**
+  * FlinkBase is a base class with capabilities common to all of the streams flink examples.
+  */
+trait FlinkBase {
+
+  private val BASELOGGER = LoggerFactory.getLogger("FlinkBase")
+  private val MAPPER = StreamsJacksonMapper.getInstance()
+
+  var configUrl : String = _
+  var typesafe : Config = _
+  var streamsConfig = StreamsConfigurator.detectConfiguration(StreamsConfigurator.getConfig)
+  var streamsFlinkConfiguration: StreamsFlinkConfiguration = _
+
+  var executionEnvironment: ExecutionEnvironment = _
+  var streamExecutionEnvironment: StreamExecutionEnvironment = _
+
+  /*
+   Basic stuff for every flink job
+   */
+  def main(args: Array[String]): Unit = {
+    // if only one argument, use it as the config URL
+    if( args.length > 0 ) {
+      BASELOGGER.info("Args: {}", args)
+      configUrl = args(0)
+      setup(configUrl)
+    }
+
+  }
+
+  def setup(configUrl : String): Boolean =  {
+    BASELOGGER.info("StreamsConfigurator.config: {}", StreamsConfigurator.getConfig)
+    if(StringUtils.isNotEmpty(configUrl)) {
+      BASELOGGER.info("StreamsConfigurator.resolveConfig(configUrl): {}", StreamsConfigurator.resolveConfig(configUrl))
+      try {
+        typesafe = StreamsConfigurator.resolveConfig(configUrl).withFallback(StreamsConfigurator.getConfig).resolve()
+      } catch {
+        case mue: MalformedURLException => {
+          BASELOGGER.error("Invalid Configuration URL: ", mue)
+          return false
+        }
+        case e: Exception => {
+          BASELOGGER.error("Invalid Configuration URL: ", e)
+          return false
+        }
+      }
+    }
+    else {
+      typesafe = StreamsConfigurator.getConfig
+    }
+
+    setup(typesafe)
+
+  }
+
+  def setup(typesafe : Config): Boolean =  {
+    this.typesafe = typesafe
+
+    BASELOGGER.info("Typesafe Config: {}", typesafe)
+
+    if( this.typesafe.getString("mode").equals("streaming")) {
+      val streamingConfiguration: FlinkStreamingConfiguration =
+        new ComponentConfigurator[FlinkStreamingConfiguration](classOf[FlinkStreamingConfiguration]).detectConfiguration(typesafe)
+      setupStreaming(streamingConfiguration)
+    } else if( this.typesafe.getString("mode").equals("batch")) {
+      val batchConfiguration: FlinkBatchConfiguration =
+        new ComponentConfigurator[FlinkBatchConfiguration](classOf[FlinkBatchConfiguration]).detectConfiguration(typesafe)
+      setupBatch(batchConfiguration)
+    } else {
+      false
+    }
+  }
+
+  //  def setup(typesafe: Config): Boolean =  {
+  //
+  //    val streamsConfig = StreamsConfigurator.detectConfiguration(typesafe)
+  //
+  //    this.streamsConfig = streamsConfig
+  //
+  //    BASELOGGER.info("Streams Config: " + streamsConfig)
+  //
+  //    setup(streamsConfig)
+  //  }
+
+  /**
+    * Stores the streaming configuration and lazily creates the
+    * StreamExecutionEnvironment for this job.
+    *
+    * @param streamingConfiguration resolved flink streaming configuration
+    * @return true when the environment is ready, false otherwise
+    */
+  def setupStreaming(streamingConfiguration: FlinkStreamingConfiguration): Boolean = {
+
+    BASELOGGER.info("FsStreamingFlinkConfiguration: " + streamingConfiguration)
+
+    this.streamsFlinkConfiguration = streamingConfiguration
+
+    if( streamsFlinkConfiguration == null) return false
+
+    // only create the environment once, so repeated setup calls are idempotent
+    if( streamExecutionEnvironment == null )
+      streamExecutionEnvironment = streamEnvironment(streamingConfiguration)
+
+    // report success, mirroring setupBatch; previously this returned false
+    // unconditionally, so setup(typesafe) signalled failure for every streaming job
+    true
+
+  }
+
+  def setupBatch(batchConfiguration: FlinkBatchConfiguration): Boolean =  {
+
+    BASELOGGER.info("FsBatchFlinkConfiguration: " + batchConfiguration)
+
+    this.streamsFlinkConfiguration = batchConfiguration
+
+    if( streamsFlinkConfiguration == null) return false
+
+    if( executionEnvironment == null )
+      executionEnvironment = batchEnvironment(batchConfiguration)
+
+    true
+
+  }
+
+  def batchEnvironment(config: FlinkBatchConfiguration = new FlinkBatchConfiguration()) : ExecutionEnvironment = {
+    if (config.getTest == false && config.getLocal == false) {
+      val env = ExecutionEnvironment.getExecutionEnvironment
+      env
+    } else {
+      val env = ExecutionEnvironment.createLocalEnvironment(config.getParallel.toInt)
+      env
+    }
+  }
+
+  def streamEnvironment(config: FlinkStreamingConfiguration = new FlinkStreamingConfiguration()) : StreamExecutionEnvironment = {
+    if( config.getTest == false && config.getLocal == false) {
+      val env = StreamExecutionEnvironment.getExecutionEnvironment
+
+      env.setRestartStrategy(RestartStrategies.noRestart())
+
+      // start a checkpoint every hour
+      env.enableCheckpointing(config.getCheckpointIntervalMs)
+
+      env.getCheckpointConfig.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE)
+
+      // checkpoints have to complete within five minutes, or are discarded
+      env.getCheckpointConfig.setCheckpointTimeout(config.getCheckpointTimeoutMs)
+
+      // allow only one checkpoint to be in progress at the same time
+      env.getCheckpointConfig.setMaxConcurrentCheckpoints(1)
+
+      env
+    }
+
+    else StreamExecutionEnvironment.createLocalEnvironment(config.getParallel.toInt)
+  }
+
+  def buildReaderPath(configObject: HdfsReaderConfiguration) : String = {
+    var inPathBuilder : String = ""
+    if (configObject.getScheme.equals(HdfsConfiguration.Scheme.FILE)) {
+      inPathBuilder = configObject.getPath + "/" + configObject.getReaderPath
+    }
+    else if (configObject.getScheme.equals(HdfsConfiguration.Scheme.HDFS)) {
+      inPathBuilder = configObject.getScheme + "://" + configObject.getHost + ":" + configObject.getPort + "/" + configObject.getPath + "/" + configObject.getReaderPath
+    }
+    else if (configObject.getScheme.toString.equals("s3")) {
+      inPathBuilder = configObject.getScheme + "://" + configObject.getPath + "/" + configObject.getReaderPath
+    } else {
+      throw new Exception("scheme not recognized: " + configObject.getScheme)
+    }
+    inPathBuilder
+  }
+
+  def buildWriterPath(configObject: HdfsWriterConfiguration) : String = {
+    var outPathBuilder : String = ""
+    if( configObject.getScheme.equals(HdfsConfiguration.Scheme.FILE)) {
+      outPathBuilder = configObject.getPath + "/" + configObject.getWriterPath
+    }
+    else if( configObject.getScheme.equals(HdfsConfiguration.Scheme.HDFS)) {
+      outPathBuilder = configObject.getScheme + "://" + configObject.getHost + ":" + configObject.getPort + "/" + configObject.getPath + "/" + configObject.getWriterPath
+    }
+    else if( configObject.getScheme.toString.equals("s3")) {
+      outPathBuilder = configObject.getScheme + "://" + configObject.getPath + "/" + configObject.getWriterPath
+    } else {
+      throw new Exception("output scheme not recognized: " + configObject.getScheme)
+    }
+    outPathBuilder
+  }
+
+  def toProviderId(input : String) : String = {
+    if( input.startsWith("@") )
+      return input.substring(1)
+    if( input.contains(':'))
+      input.substring(input.lastIndexOf(':')+1)
+    else input
+  }
+
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/scala/org/apache/streams/examples/flink/twitter/collection/FlinkTwitterFollowingPipeline.scala b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/scala/org/apache/streams/examples/flink/twitter/collection/FlinkTwitterFollowingPipeline.scala
new file mode 100644
index 0000000..5e8ccfd
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/scala/org/apache/streams/examples/flink/twitter/collection/FlinkTwitterFollowingPipeline.scala
@@ -0,0 +1,178 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.examples.flink.twitter.collection
+
+import java.util.Objects
+import java.util.concurrent.TimeUnit
+
+import com.fasterxml.jackson.databind.ObjectMapper
+import com.google.common.util.concurrent.Uninterruptibles
+import org.apache.commons.lang3.StringUtils
+import org.apache.flink.api.common.functions.RichFlatMapFunction
+import org.apache.flink.api.scala._
+import org.apache.flink.core.fs.FileSystem
+import org.apache.flink.streaming.api.TimeCharacteristic
+import org.apache.flink.streaming.api.scala.{DataStream, KeyedStream, StreamExecutionEnvironment}
+import org.apache.flink.streaming.connectors.fs.bucketing.BucketingSink
+import org.apache.flink.util.Collector
+import org.apache.streams.config.{ComponentConfigurator, StreamsConfigurator}
+import org.apache.streams.core.StreamsDatum
+import org.apache.streams.examples.flink.FlinkBase
+import org.apache.streams.examples.flink.twitter.TwitterFollowingPipelineConfiguration
+import org.apache.streams.flink.{FlinkStreamingConfiguration, StreamsFlinkConfiguration}
+import org.apache.streams.jackson.StreamsJacksonMapper
+import org.apache.streams.twitter.TwitterFollowingConfiguration
+import org.apache.streams.twitter.pojo.Follow
+import org.apache.streams.twitter.provider.TwitterFollowingProvider
+import org.hamcrest.MatcherAssert
+import org.slf4j.{Logger, LoggerFactory}
+
+import scala.collection.JavaConversions._
+
+/**
+  * FlinkTwitterFollowingPipeline collects friends or followers of all profiles from a
+  * set of IDs, writing each connection as a twitter:follow in json format to dfs.
+  */
+object FlinkTwitterFollowingPipeline extends FlinkBase {
+
+  val STREAMS_ID: String = "FlinkTwitterFollowingPipeline"
+
+  // Log under this pipeline's own class; the original used
+  // classOf[FlinkTwitterUserInformationPipeline] (a copy/paste slip from the
+  // sibling pipeline), which misattributed all log output.
+  private val LOGGER: Logger = LoggerFactory.getLogger(classOf[FlinkTwitterFollowingPipeline])
+  private val MAPPER: ObjectMapper = StreamsJacksonMapper.getInstance()
+
+  override def main(args: Array[String]) = {
+    super.main(args)
+    val jobConfig = new ComponentConfigurator[TwitterFollowingPipelineConfiguration](classOf[TwitterFollowingPipelineConfiguration]).detectConfiguration(typesafe)
+    if( !setup(jobConfig) ) System.exit(1)
+    val pipeline: FlinkTwitterFollowingPipeline = new FlinkTwitterFollowingPipeline(jobConfig)
+    val thread = new Thread(pipeline)
+    thread.start()
+    thread.join()
+  }
+
+  def setup(jobConfig: TwitterFollowingPipelineConfiguration): Boolean =  {
+
+    LOGGER.info("TwitterFollowingPipelineConfiguration: " + jobConfig)
+
+    if( jobConfig == null ) {
+      LOGGER.error("jobConfig is null!")
+      System.err.println("jobConfig is null!")
+      return false
+    }
+
+    if( jobConfig.getSource == null ) {
+      LOGGER.error("jobConfig.getSource is null!")
+      System.err.println("jobConfig.getSource is null!")
+      return false
+    }
+
+    if( jobConfig.getDestination == null ) {
+      LOGGER.error("jobConfig.getDestination is null!")
+      System.err.println("jobConfig.getDestination is null!")
+      return false
+    }
+
+    if( jobConfig.getTwitter == null ) {
+      LOGGER.error("jobConfig.getTwitter is null!")
+      System.err.println("jobConfig.getTwitter is null!")
+      return false
+    }
+
+    Objects.requireNonNull(jobConfig.getTwitter.getOauth)
+    MatcherAssert.assertThat("OAuth Access Token is not Empty",
+      StringUtils.isNotEmpty(jobConfig.getTwitter.getOauth.getAccessToken))
+    MatcherAssert.assertThat("OAuth Access Secret is not Empty",
+      StringUtils.isNotEmpty(jobConfig.getTwitter.getOauth.getAccessTokenSecret))
+    MatcherAssert.assertThat("OAuth Consumer Key is not Empty",
+      StringUtils.isNotEmpty(jobConfig.getTwitter.getOauth.getConsumerKey))
+    MatcherAssert.assertThat("OAuth Consumer Secret is not Empty",
+      StringUtils.isNotEmpty(jobConfig.getTwitter.getOauth.getConsumerSecret))
+
+    true
+
+  }
+
+}
+
+class FlinkTwitterFollowingPipeline(config: TwitterFollowingPipelineConfiguration = new ComponentConfigurator[TwitterFollowingPipelineConfiguration](classOf[TwitterFollowingPipelineConfiguration]).detectConfiguration(StreamsConfigurator.getConfig)) extends Runnable with java.io.Serializable {
+
+  import FlinkTwitterFollowingPipeline._
+
+  override def run(): Unit = {
+
+    val env: StreamExecutionEnvironment = streamEnvironment(MAPPER.convertValue(config, classOf[FlinkStreamingConfiguration]))
+
+    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
+    env.setNumberOfExecutionRetries(0)
+
+    val inPath = buildReaderPath(config.getSource)
+
+    val outPath = buildWriterPath(config.getDestination)
+
+    val keyed_ids: KeyedStream[String, Int] = env.readTextFile(inPath).setParallelism(10).keyBy( id => (id.hashCode % 100).abs )
+
+    // these datums contain 'Follow' objects
+    val followDatums: DataStream[StreamsDatum] =
+      keyed_ids.flatMap(new FollowingCollectorFlatMapFunction(config.getTwitter)).setParallelism(10)
+
+    val follows: DataStream[Follow] = followDatums
+      .map(datum => datum.getDocument.asInstanceOf[Follow])
+
+    val jsons: DataStream[String] = follows
+      .map(follow => {
+        val MAPPER = StreamsJacksonMapper.getInstance
+        MAPPER.writeValueAsString(follow)
+      })
+
+    if( config.getTest == false )
+      jsons.addSink(new BucketingSink[String](outPath)).setParallelism(3)
+    else
+      jsons.writeAsText(outPath,FileSystem.WriteMode.OVERWRITE)
+        .setParallelism(env.getParallelism)
+
+    // if( test == true ) jsons.print();
+
+    env.execute(STREAMS_ID)
+  }
+
+  class FollowingCollectorFlatMapFunction(
+                                           twitterConfiguration : TwitterFollowingConfiguration = new ComponentConfigurator[TwitterFollowingConfiguration](classOf[TwitterFollowingConfiguration]).detectConfiguration(StreamsConfigurator.getConfig.getConfig("twitter")),
+                                           flinkConfiguration : StreamsFlinkConfiguration = new ComponentConfigurator[StreamsFlinkConfiguration](classOf[StreamsFlinkConfiguration]).detectConfiguration(StreamsConfigurator.getConfig)
+                                         ) extends RichFlatMapFunction[String, StreamsDatum] with Serializable {
+
+    override def flatMap(input: String, out: Collector[StreamsDatum]): Unit = {
+      collectConnections(input, out)
+    }
+
+    def collectConnections(id : String, out : Collector[StreamsDatum]) = {
+      val twitProvider: TwitterFollowingProvider =
+        new TwitterFollowingProvider(
+          twitterConfiguration.withInfo(List(toProviderId(id))).asInstanceOf[TwitterFollowingConfiguration]
+        )
+      twitProvider.prepare(twitProvider)
+      twitProvider.startStream()
+      var iterator: Iterator[StreamsDatum] = null
+      do {
+        Uninterruptibles.sleepUninterruptibly(flinkConfiguration.getProviderWaitMs, TimeUnit.MILLISECONDS)
+        twitProvider.readCurrent().iterator().toList.map(out.collect(_))
+      } while( twitProvider.isRunning )
+    }
+  }
+
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/scala/org/apache/streams/examples/flink/twitter/collection/FlinkTwitterPostsPipeline.scala b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/scala/org/apache/streams/examples/flink/twitter/collection/FlinkTwitterPostsPipeline.scala
new file mode 100644
index 0000000..f9b033e
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/scala/org/apache/streams/examples/flink/twitter/collection/FlinkTwitterPostsPipeline.scala
@@ -0,0 +1,176 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.examples.flink.twitter.collection
+
+import java.util.Objects
+import java.util.concurrent.TimeUnit
+
+import com.fasterxml.jackson.databind.ObjectMapper
+import com.google.common.util.concurrent.Uninterruptibles
+import org.apache.commons.lang3.StringUtils
+import org.apache.flink.api.common.functions.RichFlatMapFunction
+import org.apache.flink.api.scala._
+import org.apache.flink.core.fs.FileSystem
+import org.apache.flink.streaming.api.TimeCharacteristic
+import org.apache.flink.streaming.api.scala.{DataStream, KeyedStream, StreamExecutionEnvironment}
+import org.apache.flink.streaming.connectors.fs.bucketing.BucketingSink
+import org.apache.flink.util.Collector
+import org.apache.streams.config.{ComponentConfigurator, StreamsConfigurator}
+import org.apache.streams.core.StreamsDatum
+import org.apache.streams.examples.flink.FlinkBase
+import org.apache.streams.examples.flink.twitter.TwitterPostsPipelineConfiguration
+import org.apache.streams.flink.FlinkStreamingConfiguration
+import org.apache.streams.jackson.StreamsJacksonMapper
+import org.apache.streams.twitter.pojo.Tweet
+import org.apache.streams.twitter.provider.TwitterTimelineProvider
+import org.hamcrest.MatcherAssert
+import org.slf4j.{Logger, LoggerFactory}
+
+import scala.collection.JavaConversions._
+
+/**
+  * FlinkTwitterPostsPipeline collects recent posts from all profiles from a
+  * set of IDs, writing each post as a twitter:status in json format to dfs.
+  */
+object FlinkTwitterPostsPipeline extends FlinkBase {
+
+  val STREAMS_ID: String = "FlinkTwitterPostsPipeline"
+
+  private val LOGGER: Logger = LoggerFactory.getLogger(classOf[FlinkTwitterPostsPipeline])
+  private val MAPPER: ObjectMapper = StreamsJacksonMapper.getInstance()
+
+  override def main(args: Array[String]) = {
+    super.main(args)
+    val jobConfig = new ComponentConfigurator[TwitterPostsPipelineConfiguration](classOf[TwitterPostsPipelineConfiguration]).detectConfiguration(typesafe)
+    if( !setup(jobConfig) ) System.exit(1)
+    val pipeline: FlinkTwitterPostsPipeline = new FlinkTwitterPostsPipeline(jobConfig)
+    val thread = new Thread(pipeline)
+    thread.start()
+    thread.join()
+  }
+
+  def setup(jobConfig: TwitterPostsPipelineConfiguration): Boolean =  {
+
+    LOGGER.info("TwitterPostsPipelineConfiguration: " + jobConfig)
+
+    if( jobConfig == null ) {
+      LOGGER.error("jobConfig is null!")
+      System.err.println("jobConfig is null!")
+      return false
+    }
+
+    if( jobConfig.getSource == null ) {
+      LOGGER.error("jobConfig.getSource is null!")
+      System.err.println("jobConfig.getSource is null!")
+      return false
+    }
+
+    if( jobConfig.getDestination == null ) {
+      LOGGER.error("jobConfig.getDestination is null!")
+      System.err.println("jobConfig.getDestination is null!")
+      return false
+    }
+
+    if( jobConfig.getTwitter == null ) {
+      LOGGER.error("jobConfig.getTwitter is null!")
+      System.err.println("jobConfig.getTwitter is null!")
+      return false
+    }
+
+    Objects.requireNonNull(jobConfig.getTwitter.getOauth)
+    MatcherAssert.assertThat("OAuth Access Token is not Empty",
+      StringUtils.isNotEmpty(jobConfig.getTwitter.getOauth.getAccessToken))
+    MatcherAssert.assertThat("OAuth Access Secret is not Empty",
+      StringUtils.isNotEmpty(jobConfig.getTwitter.getOauth.getAccessTokenSecret))
+    MatcherAssert.assertThat("OAuth Consumer Key is not Empty",
+      StringUtils.isNotEmpty(jobConfig.getTwitter.getOauth.getConsumerKey))
+    MatcherAssert.assertThat("OAuth Consumer Secret is not Empty",
+      StringUtils.isNotEmpty(jobConfig.getTwitter.getOauth.getConsumerSecret))
+
+    true
+
+  }
+
+}
+
+class FlinkTwitterPostsPipeline(config: TwitterPostsPipelineConfiguration = new ComponentConfigurator[TwitterPostsPipelineConfiguration](classOf[TwitterPostsPipelineConfiguration]).detectConfiguration(StreamsConfigurator.getConfig)) extends Runnable with java.io.Serializable {
+
+  import FlinkTwitterPostsPipeline._
+
+  override def run(): Unit = {
+
+    val env: StreamExecutionEnvironment = streamEnvironment(MAPPER.convertValue(config, classOf[FlinkStreamingConfiguration]))
+
+    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
+    env.setNumberOfExecutionRetries(0)
+
+    val inPath = buildReaderPath(config.getSource)
+
+    val outPath = buildWriterPath(config.getDestination)
+
+    val ids: DataStream[String] = env.readTextFile(inPath).setParallelism(10).name("ids")
+
+    val keyed_ids: KeyedStream[String, Int] = ids.name("keyed_ids").keyBy( id => (id.hashCode % 100).abs )
+
+    // these datums contain 'Tweet' objects
+    val tweetDatums: DataStream[StreamsDatum] =
+      keyed_ids.flatMap(new postCollectorFlatMapFunction).setParallelism(10).name("tweetDatums")
+
+    val tweets: DataStream[Tweet] = tweetDatums
+      .map(datum => datum.getDocument.asInstanceOf[Tweet]).name("tweets")
+
+    val jsons: DataStream[String] = tweets
+      .map(tweet => {
+        val MAPPER = StreamsJacksonMapper.getInstance
+        MAPPER.writeValueAsString(tweet)
+      }).name("json")
+
+    if( config.getTest == false )
+      jsons.addSink(new BucketingSink[String](outPath)).setParallelism(3).name("hdfs")
+    else
+      jsons.writeAsText(outPath,FileSystem.WriteMode.OVERWRITE)
+        .setParallelism(env.getParallelism)
+
+    // if( test == true ) jsons.print();
+
+    env.execute(STREAMS_ID)
+  }
+
+  class postCollectorFlatMapFunction extends RichFlatMapFunction[String, StreamsDatum] with Serializable {
+    override def flatMap(input: String, out: Collector[StreamsDatum]): Unit = {
+      collectPosts(input, out)
+    }
+    def collectPosts(id : String, out : Collector[StreamsDatum]) = {
+      val twitterConfiguration = config.getTwitter
+      twitterConfiguration.setInfo(List(toProviderId(id)))
+      val twitProvider: TwitterTimelineProvider =
+        new TwitterTimelineProvider(twitterConfiguration)
+      twitProvider.prepare(twitProvider)
+      twitProvider.startStream()
+      var iterator: Iterator[StreamsDatum] = null
+      do {
+        Uninterruptibles.sleepUninterruptibly(config.getProviderWaitMs, TimeUnit.MILLISECONDS)
+        twitProvider.readCurrent().iterator().toList.map(out.collect(_))
+      } while( twitProvider.isRunning )
+    }
+  }
+
+
+}
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/scala/org/apache/streams/examples/flink/twitter/collection/FlinkTwitterSpritzerPipeline.scala b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/scala/org/apache/streams/examples/flink/twitter/collection/FlinkTwitterSpritzerPipeline.scala
new file mode 100644
index 0000000..f4379c1
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/scala/org/apache/streams/examples/flink/twitter/collection/FlinkTwitterSpritzerPipeline.scala
@@ -0,0 +1,176 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.examples.flink.twitter.collection
+
+import java.io.Serializable
+import java.util.Objects
+import java.util.concurrent.TimeUnit
+
+import com.fasterxml.jackson.databind.ObjectMapper
+import com.google.common.util.concurrent.Uninterruptibles
+import org.apache.commons.lang3.StringUtils
+import org.apache.flink.api.common.functions.StoppableFunction
+import org.apache.flink.api.scala._
+import org.apache.flink.configuration.Configuration
+import org.apache.flink.core.fs.FileSystem
+import org.apache.flink.streaming.api.TimeCharacteristic
+import org.apache.flink.streaming.api.functions.source.{RichSourceFunction, SourceFunction}
+import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
+import org.apache.flink.streaming.connectors.fs.bucketing.BucketingSink
+import org.apache.streams.config.{ComponentConfigurator, StreamsConfigurator}
+import org.apache.streams.core.StreamsDatum
+import org.apache.streams.examples.flink.FlinkBase
+import org.apache.streams.examples.flink.twitter.TwitterSpritzerPipelineConfiguration
+import org.apache.streams.flink.FlinkStreamingConfiguration
+import org.apache.streams.jackson.StreamsJacksonMapper
+import org.apache.streams.twitter.TwitterStreamConfiguration
+import org.apache.streams.twitter.converter.TwitterDateTimeFormat
+import org.apache.streams.twitter.provider.TwitterStreamProvider
+import org.hamcrest.MatcherAssert
+import org.slf4j.{Logger, LoggerFactory}
+
+import scala.collection.JavaConversions._
+
+/**
+  * FlinkTwitterSpritzerPipeline opens a spritzer stream and writes
+  * each post received as a twitter:status in json format to dfs.
+  */
+object FlinkTwitterSpritzerPipeline extends FlinkBase {
+
+  val STREAMS_ID: String = "FlinkTwitterSpritzerPipeline"
+
+  private val LOGGER: Logger = LoggerFactory.getLogger(classOf[FlinkTwitterSpritzerPipeline])
+  private val MAPPER: ObjectMapper = StreamsJacksonMapper.getInstance()
+
+  override def main(args: Array[String]) = {
+    super.main(args)
+    val jobConfig = new ComponentConfigurator(classOf[TwitterSpritzerPipelineConfiguration]).detectConfiguration(typesafe)
+    if( !setup(jobConfig) ) System.exit(1)
+    val pipeline: FlinkTwitterSpritzerPipeline = new FlinkTwitterSpritzerPipeline(jobConfig)
+    val thread = new Thread(pipeline)
+    thread.start()
+    thread.join()
+  }
+
+  def setup(jobConfig: TwitterSpritzerPipelineConfiguration): Boolean =  {
+
+    LOGGER.info("TwitterSpritzerPipelineConfiguration: " + jobConfig)
+
+    if( jobConfig == null ) {
+      LOGGER.error("jobConfig is null!")
+      System.err.println("jobConfig is null!")
+      return false
+    }
+
+    if( jobConfig.getDestination == null ) {
+      LOGGER.error("jobConfig.getDestination is null!")
+      System.err.println("jobConfig.getDestination is null!")
+      return false
+    }
+
+    if( jobConfig.getTwitter == null ) {
+      LOGGER.error("jobConfig.getTwitter is null!")
+      System.err.println("jobConfig.getTwitter is null!")
+      return false
+    }
+
+    Objects.requireNonNull(jobConfig.getTwitter.getOauth)
+    MatcherAssert.assertThat("OAuth Access Token is not Empty",
+      StringUtils.isNotEmpty(jobConfig.getTwitter.getOauth.getAccessToken))
+    MatcherAssert.assertThat("OAuth Access Secret is not Empty",
+      StringUtils.isNotEmpty(jobConfig.getTwitter.getOauth.getAccessTokenSecret))
+    MatcherAssert.assertThat("OAuth Consumer Key is not Empty",
+      StringUtils.isNotEmpty(jobConfig.getTwitter.getOauth.getConsumerKey))
+    MatcherAssert.assertThat("OAuth Consumer Secret is not Empty",
+      StringUtils.isNotEmpty(jobConfig.getTwitter.getOauth.getConsumerSecret))
+
+    true
+
+  }
+
+}
+
+class FlinkTwitterSpritzerPipeline(config: TwitterSpritzerPipelineConfiguration = new ComponentConfigurator(classOf[TwitterSpritzerPipelineConfiguration]).detectConfiguration(StreamsConfigurator.getConfig)) extends Runnable with java.io.Serializable {
+
+  import FlinkTwitterSpritzerPipeline._
+
+  val spritzerSource = new SpritzerSource(config.getTwitter)
+
+  override def run(): Unit = {
+
+    val env: StreamExecutionEnvironment = streamEnvironment(MAPPER.convertValue(config, classOf[FlinkStreamingConfiguration]))
+
+    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
+    env.setNumberOfExecutionRetries(0)
+
+    val outPath = buildWriterPath(config.getDestination)
+
+    val streamSource : DataStream[String] = env.addSource(spritzerSource)
+
+    if( config.getTest == false )
+      streamSource.addSink(new BucketingSink[String](outPath)).setParallelism(3).name("hdfs")
+    else
+      streamSource.writeAsText(outPath,FileSystem.WriteMode.OVERWRITE)
+        .setParallelism(env.getParallelism)
+
+    // if( test == true ) jsons.print();
+
+    env.execute(STREAMS_ID)
+
+  }
+
+  def stop(): Unit = {
+    spritzerSource.stop()
+  }
+
+  class SpritzerSource(sourceConfig: TwitterStreamConfiguration) extends RichSourceFunction[String] with Serializable with StoppableFunction {
+
+    var mapper: ObjectMapper = _
+
+    var twitProvider: TwitterStreamProvider = _
+
+    @throws[Exception]
+    override def open(parameters: Configuration): Unit = {
+      mapper = StreamsJacksonMapper.getInstance(TwitterDateTimeFormat.TWITTER_FORMAT)
+      twitProvider = new TwitterStreamProvider( sourceConfig )
+      twitProvider.prepare(twitProvider)
+      twitProvider.startStream()
+    }
+
+    override def run(ctx: SourceFunction.SourceContext[String]): Unit = {
+      var iterator: Iterator[StreamsDatum] = null
+      do {
+        Uninterruptibles.sleepUninterruptibly(config.getProviderWaitMs, TimeUnit.MILLISECONDS)
+        iterator = twitProvider.readCurrent().iterator()
+        iterator.toList.map(datum => ctx.collect(mapper.writeValueAsString(datum.getDocument)))
+      } while( twitProvider.isRunning )
+    }
+
+    override def cancel(): Unit = {
+      close()
+    }
+
+    override def stop(): Unit = {
+      close()
+    }
+  }
+
+
+}
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/scala/org/apache/streams/examples/flink/twitter/collection/FlinkTwitterUserInformationPipeline.scala b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/scala/org/apache/streams/examples/flink/twitter/collection/FlinkTwitterUserInformationPipeline.scala
new file mode 100644
index 0000000..e3160f1
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/main/scala/org/apache/streams/examples/flink/twitter/collection/FlinkTwitterUserInformationPipeline.scala
@@ -0,0 +1,185 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.examples.flink.twitter.collection
+
+import java.util.Objects
+import java.util.concurrent.TimeUnit
+
+import com.fasterxml.jackson.databind.ObjectMapper
+import com.google.common.util.concurrent.Uninterruptibles
+import org.apache.commons.lang3.StringUtils
+import org.apache.flink.api.common.functions.RichFlatMapFunction
+import org.apache.flink.api.scala._
+import org.apache.flink.core.fs.FileSystem
+import org.apache.flink.streaming.api.TimeCharacteristic
+import org.apache.flink.streaming.api.scala.function.WindowFunction
+import org.apache.flink.streaming.api.scala.{DataStream, KeyedStream, StreamExecutionEnvironment, WindowedStream}
+import org.apache.flink.streaming.api.windowing.windows.GlobalWindow
+import org.apache.flink.streaming.connectors.fs.bucketing.BucketingSink
+import org.apache.flink.util.Collector
+import org.apache.streams.config.{ComponentConfigurator, StreamsConfigurator}
+import org.apache.streams.core.StreamsDatum
+import org.apache.streams.examples.flink.FlinkBase
+import org.apache.streams.examples.flink.twitter.TwitterUserInformationPipelineConfiguration
+import org.apache.streams.flink.FlinkStreamingConfiguration
+import org.apache.streams.jackson.StreamsJacksonMapper
+import org.apache.streams.twitter.pojo.User
+import org.apache.streams.twitter.provider.TwitterUserInformationProvider
+import org.hamcrest.MatcherAssert
+import org.slf4j.{Logger, LoggerFactory}
+
+import scala.collection.JavaConversions._
+
+/**
+  * FlinkTwitterUserInformationPipeline collects the current user profile of a
+  * set of IDs, writing each as a twitter:user in json format to dfs.
+  */
+object FlinkTwitterUserInformationPipeline extends FlinkBase {
+
+  val STREAMS_ID: String = "FlinkTwitterUserInformationPipeline"
+
+  private val LOGGER: Logger = LoggerFactory.getLogger(classOf[FlinkTwitterUserInformationPipeline])
+  private val MAPPER: ObjectMapper = StreamsJacksonMapper.getInstance()
+
+  override def main(args: Array[String]) = {
+    super.main(args)
+    val jobConfig = new ComponentConfigurator[TwitterUserInformationPipelineConfiguration](classOf[TwitterUserInformationPipelineConfiguration]).detectConfiguration(typesafe)
+    if( !setup(jobConfig) ) System.exit(1)
+    val pipeline: FlinkTwitterUserInformationPipeline = new FlinkTwitterUserInformationPipeline(jobConfig)
+    val thread = new Thread(pipeline)
+    thread.start()
+    thread.join()
+  }
+
+  def setup(jobConfig: TwitterUserInformationPipelineConfiguration): Boolean =  {
+
+    LOGGER.info("TwitterUserInformationPipelineConfiguration: " + jobConfig)
+
+    if( jobConfig == null ) {
+      LOGGER.error("jobConfig is null!")
+      System.err.println("jobConfig is null!")
+      return false
+    }
+
+    if( jobConfig.getSource == null ) {
+      LOGGER.error("jobConfig.getSource is null!")
+      System.err.println("jobConfig.getSource is null!")
+      return false
+    }
+
+    if( jobConfig.getDestination == null ) {
+      LOGGER.error("jobConfig.getDestination is null!")
+      System.err.println("jobConfig.getDestination is null!")
+      return false
+    }
+
+    if( jobConfig.getTwitter == null ) {
+      LOGGER.error("jobConfig.getTwitter is null!")
+      System.err.println("jobConfig.getTwitter is null!")
+      return false
+    }
+
+    Objects.requireNonNull(jobConfig.getTwitter.getOauth)
+    MatcherAssert.assertThat("OAuth Access Token is not Empty",
+      StringUtils.isNotEmpty(jobConfig.getTwitter.getOauth.getAccessToken))
+    MatcherAssert.assertThat("OAuth Access Secret is not Empty",
+      StringUtils.isNotEmpty(jobConfig.getTwitter.getOauth.getAccessTokenSecret))
+    MatcherAssert.assertThat("OAuth Consumer Key is not Empty",
+      StringUtils.isNotEmpty(jobConfig.getTwitter.getOauth.getConsumerKey))
+    MatcherAssert.assertThat("OAuth Consumer Secret is not Empty",
+      StringUtils.isNotEmpty(jobConfig.getTwitter.getOauth.getConsumerSecret))
+
+    true
+
+  }
+
+}
+
+class FlinkTwitterUserInformationPipeline(config: TwitterUserInformationPipelineConfiguration = new ComponentConfigurator[TwitterUserInformationPipelineConfiguration](classOf[TwitterUserInformationPipelineConfiguration]).detectConfiguration(StreamsConfigurator.getConfig)) extends Runnable with java.io.Serializable {
+
+  import FlinkTwitterUserInformationPipeline._
+
+  override def run(): Unit = {
+
+    val env: StreamExecutionEnvironment = streamEnvironment(MAPPER.convertValue(config, classOf[FlinkStreamingConfiguration]))
+
+    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime)
+    env.setNumberOfExecutionRetries(0)
+
+    val inPath = buildReaderPath(config.getSource)
+
+    val outPath = buildWriterPath(config.getDestination)
+
+    val ids: DataStream[String] = env.readTextFile(inPath).setParallelism(10).name("ids")
+
+    val keyed_ids: KeyedStream[String, Int] = ids.name("keyed_ids").keyBy( id => (id.hashCode % 100).abs )
+
+    val idWindows: WindowedStream[String, Int, GlobalWindow] = keyed_ids.countWindow(100)
+
+    val idLists: DataStream[List[String]] = idWindows.apply[List[String]] (new idListWindowFunction()).name("idLists")
+
+    val userDatums: DataStream[StreamsDatum] = idLists.flatMap(new profileCollectorFlatMapFunction).setParallelism(10).name("userDatums")
+
+    val user: DataStream[User] = userDatums.map(datum => datum.getDocument.asInstanceOf[User]).name("users")
+
+    val jsons: DataStream[String] = user
+      .map(user => {
+        val MAPPER = StreamsJacksonMapper.getInstance
+        MAPPER.writeValueAsString(user)
+      }).name("jsons")
+
+    if( config.getTest == false )
+      jsons.addSink(new BucketingSink[String](outPath)).setParallelism(3).name("hdfs")
+    else
+      jsons.writeAsText(outPath,FileSystem.WriteMode.OVERWRITE)
+        .setParallelism(env.getParallelism)
+
+    LOGGER.info("StreamExecutionEnvironment: {}", env.toString )
+
+    env.execute(STREAMS_ID)
+  }
+
+  class idListWindowFunction extends WindowFunction[String, List[String], Int, GlobalWindow] {
+    override def apply(key: Int, window: GlobalWindow, input: Iterable[String], out: Collector[List[String]]): Unit = {
+      if( input.nonEmpty )
+        out.collect(input.map(id => toProviderId(id)).toList)
+    }
+  }
+
+  class profileCollectorFlatMapFunction extends RichFlatMapFunction[List[String], StreamsDatum] with Serializable {
+    override def flatMap(input: List[String], out: Collector[StreamsDatum]): Unit = {
+      collectProfiles(input, out)
+    }
+    def collectProfiles(ids : List[String], out : Collector[StreamsDatum]) = {
+      val twitterConfiguration = config.getTwitter
+      val twitProvider: TwitterUserInformationProvider =
+        new TwitterUserInformationProvider(
+          twitterConfiguration.withInfo(ids)
+        )
+      twitProvider.prepare(twitProvider)
+      twitProvider.startStream()
+      var iterator: Iterator[StreamsDatum] = null
+      do {
+        Uninterruptibles.sleepUninterruptibly(config.getProviderWaitMs, TimeUnit.MILLISECONDS)
+        twitProvider.readCurrent().iterator().toList.map(out.collect(_))
+      } while( twitProvider.isRunning )
+    }
+  }
+}
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/site/markdown/FlinkTwitterFollowingPipeline.md b/streams-examples/streams-examples-flink/flink-twitter-collection/src/site/markdown/FlinkTwitterFollowingPipeline.md
new file mode 100644
index 0000000..f9f39e1
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/site/markdown/FlinkTwitterFollowingPipeline.md
@@ -0,0 +1,52 @@
+### FlinkTwitterFollowingPipeline
+
+#### Description:
+
+Collects twitter friends or followers with flink.
+
+#### Configuration:
+
+[TwitterFollowingPipelineConfiguration.json](TwitterFollowingPipelineConfiguration.json "TwitterFollowingPipelineConfiguration.json" )
+
+    include "flink.conf"
+    include "twitter.oauth.conf"
+    source {
+      fields = ["ID"]
+      scheme = file
+      path = "target/test-classes"
+      readerPath = "asf.txt"
+    }
+    destination {
+      fields = ["DOC"]
+      scheme = file
+      path = "target/test-classes"
+      writerPath = "FlinkTwitterFollowingPipelineFriendsIT"
+    }
+    twitter {
+      endpoint = friends
+      ids_only = true
+    }
+    
+#### Run (Local):
+
+    java -cp dist/flink-twitter-collection-jar-with-dependencies.jar -Dconfig.file=file://<location_of_config_file> org.apache.streams.examples.flink.twitter.collection.FlinkTwitterFollowingPipeline
+
+#### Run (Flink):
+
+    flink-run.sh dist/flink-twitter-collection-jar-with-dependencies.jar org.apache.streams.examples.flink.twitter.collection.FlinkTwitterFollowingPipeline http://<location_of_config_file> 
+
+#### Run (YARN):
+
+    flink-run.sh yarn dist/flink-twitter-collection-jar-with-dependencies.jar org.apache.streams.examples.flink.twitter.collection.FlinkTwitterFollowingPipeline http://<location_of_config_file> 
+
+#### Specification:
+
+[FlinkTwitterFollowingPipeline.dot](FlinkTwitterFollowingPipeline.dot "FlinkTwitterFollowingPipeline.dot" )
+
+#### Diagram:
+
+![FlinkTwitterFollowingPipeline.dot.svg](./FlinkTwitterFollowingPipeline.dot.svg)
+
+[JavaDocs](apidocs/index.html "JavaDocs")
+
+###### Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
\ No newline at end of file
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/site/markdown/FlinkTwitterPostsPipeline.md b/streams-examples/streams-examples-flink/flink-twitter-collection/src/site/markdown/FlinkTwitterPostsPipeline.md
new file mode 100644
index 0000000..0b4c8bd
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/site/markdown/FlinkTwitterPostsPipeline.md
@@ -0,0 +1,48 @@
+### FlinkTwitterPostsPipeline
+
+#### Description:
+
+Collects twitter posts with flink.
+
+#### Configuration:
+
+[TwitterPostsPipelineConfiguration.json](TwitterPostsPipelineConfiguration.json "TwitterPostsPipelineConfiguration.json" )
+
+    include "flink.conf"
+    include "twitter.oauth.conf"
+    source {
+      fields = ["ID"]
+      scheme = file
+      path = "target/test-classes"
+      readerPath = "asf.txt"
+    }
+    destination {
+      fields = ["DOC"]
+      scheme = file
+      path = "target/test-classes"
+      writerPath = "FlinkTwitterPostsPipelineIT"
+    }
+    
+#### Run (Local):
+
+    java -cp dist/flink-twitter-collection-jar-with-dependencies.jar -Dconfig.file=file://<location_of_config_file> org.apache.streams.examples.flink.twitter.collection.FlinkTwitterPostsPipeline
+
+#### Run (Flink):
+
+    flink-run.sh dist/flink-twitter-collection-jar-with-dependencies.jar org.apache.streams.examples.flink.twitter.collection.FlinkTwitterPostsPipeline http://<location_of_config_file> 
+
+#### Run (YARN):
+
+    flink-run.sh yarn dist/flink-twitter-collection-jar-with-dependencies.jar org.apache.streams.examples.flink.twitter.collection.FlinkTwitterPostsPipeline http://<location_of_config_file> 
+
+#### Specification:
+
+[FlinkTwitterPostsPipeline.dot](FlinkTwitterPostsPipeline.dot "FlinkTwitterPostsPipeline.dot" )
+
+#### Diagram:
+
+![FlinkTwitterPostsPipeline.dot.svg](./FlinkTwitterPostsPipeline.dot.svg)
+
+[JavaDocs](apidocs/index.html "JavaDocs")
+
+###### Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
\ No newline at end of file
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/site/markdown/FlinkTwitterSpritzerPipeline.md b/streams-examples/streams-examples-flink/flink-twitter-collection/src/site/markdown/FlinkTwitterSpritzerPipeline.md
new file mode 100644
index 0000000..0a82321
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/site/markdown/FlinkTwitterSpritzerPipeline.md
@@ -0,0 +1,48 @@
+### FlinkTwitterSpritzerPipeline
+
+#### Description:
+
+Collects twitter posts in real-time from the sample endpoint with flink.
+
+#### Configuration:
+
+[TwitterSpritzerPipelineConfiguration.json](TwitterSpritzerPipelineConfiguration.json "TwitterSpritzerPipelineConfiguration.json" )
+
+    include "flink.conf"
+    include "twitter.oauth.conf"
+    destination {
+      fields = ["DOC"]
+      scheme = file
+      path = "target/test-classes"
+      writerPath = "FlinkTwitterSpritzerPipelineIT"
+    }
+    twitter {
+      endpoint = sample
+      track = [
+        "data"
+      ]
+    }
+    
+#### Run (Local):
+
+    java -cp dist/flink-twitter-collection-jar-with-dependencies.jar -Dconfig.file=file://<location_of_config_file> org.apache.streams.examples.flink.twitter.collection.FlinkTwitterSpritzerPipeline
+
+#### Run (Flink):
+
+    flink-run.sh dist/flink-twitter-collection-jar-with-dependencies.jar org.apache.streams.examples.flink.twitter.collection.FlinkTwitterSpritzerPipeline http://<location_of_config_file> 
+
+#### Run (YARN):
+
+    flink-run.sh yarn dist/flink-twitter-collection-jar-with-dependencies.jar org.apache.streams.examples.flink.twitter.collection.FlinkTwitterSpritzerPipeline http://<location_of_config_file> 
+
+#### Specification:
+
+[FlinkTwitterSpritzerPipeline.dot](FlinkTwitterSpritzerPipeline.dot "FlinkTwitterSpritzerPipeline.dot" )
+
+#### Diagram:
+
+![FlinkTwitterSpritzerPipeline.dot.svg](./FlinkTwitterSpritzerPipeline.dot.svg)
+
+[JavaDocs](apidocs/index.html "JavaDocs")
+
+###### Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
\ No newline at end of file
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/site/markdown/FlinkTwitterUserInformationPipeline.md b/streams-examples/streams-examples-flink/flink-twitter-collection/src/site/markdown/FlinkTwitterUserInformationPipeline.md
new file mode 100644
index 0000000..ad90fab
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/site/markdown/FlinkTwitterUserInformationPipeline.md
@@ -0,0 +1,48 @@
+### FlinkTwitterUserInformationPipeline
+
+#### Description:
+
+Collects twitter user profiles with flink.
+
+#### Configuration:
+
+[TwitterUserInformationPipelineConfiguration.json](TwitterUserInformationPipelineConfiguration.json "TwitterUserInformationPipelineConfiguration.json" )
+
+    include "flink.conf"
+    include "twitter.oauth.conf"
+    source {
+      fields = ["ID"]
+      scheme = file
+      path = "target/test-classes"
+      readerPath = "1000twitterids.txt"
+    }
+    destination {
+      fields = ["DOC"]
+      scheme = file
+      path = "target/test-classes"
+      writerPath = "FlinkTwitterUserInformationPipelineIT"
+    }
+    
+#### Run (Local):
+
+    java -cp dist/flink-twitter-collection-jar-with-dependencies.jar -Dconfig.file=file://<location_of_config_file> org.apache.streams.examples.flink.twitter.collection.FlinkTwitterUserInformationPipeline
+
+#### Run (Flink):
+
+    flink-run.sh dist/flink-twitter-collection-jar-with-dependencies.jar org.apache.streams.examples.flink.twitter.collection.FlinkTwitterUserInformationPipeline http://<location_of_config_file> 
+
+#### Run (YARN):
+
+    flink-run.sh yarn dist/flink-twitter-collection-jar-with-dependencies.jar org.apache.streams.examples.flink.twitter.collection.FlinkTwitterUserInformationPipeline http://<location_of_config_file> 
+
+#### Specification:
+
+[FlinkTwitterUserInformationPipeline.dot](FlinkTwitterUserInformationPipeline.dot "FlinkTwitterUserInformationPipeline.dot" )
+
+#### Diagram:
+
+![FlinkTwitterUserInformationPipeline.dot.svg](./FlinkTwitterUserInformationPipeline.dot.svg)
+
+[JavaDocs](apidocs/index.html "JavaDocs")
+
+###### Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
\ No newline at end of file
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/site/markdown/index.md b/streams-examples/streams-examples-flink/flink-twitter-collection/src/site/markdown/index.md
new file mode 100644
index 0000000..4d534e5
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/site/markdown/index.md
@@ -0,0 +1,32 @@
+### flink-twitter-collection
+
+#### Requirements:
+ - Authorized Twitter API credentials
+
+#### Description:
+
+Collects large batches of documents from api.twitter.com from a seed set of ids.
+
+#### Streams:
+
+<a href="FlinkTwitterFollowingPipeline.html" target="_self">FlinkTwitterFollowingPipeline</a>
+
+<a href="FlinkTwitterPostsPipeline.html" target="_self">FlinkTwitterPostsPipeline</a>
+
+<a href="FlinkTwitterSpritzerPipeline.html" target="_self">FlinkTwitterSpritzerPipeline</a>
+
+<a href="FlinkTwitterUserInformationPipeline.html" target="_self">FlinkTwitterUserInformationPipeline</a>
+
+#### Build:
+
+    mvn clean package    
+
+#### Test:
+
+Build with integration testing enabled, using your credentials
+
+    mvn clean test verify -DskipITs=false -DargLine="-Dconfig.file=twitter.oauth.conf"
+
+[JavaDocs](apidocs/index.html "JavaDocs")
+
+###### Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/site/site.xml b/streams-examples/streams-examples-flink/flink-twitter-collection/src/site/site.xml
new file mode 100644
index 0000000..f801659
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/site/site.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+<project>
+    <body>
+        <menu name="Credentials">
+            <item name="Twitter" href="../../credentials/twitter.html"/>
+        </menu>
+        <menu name="Runtime">
+            <item name="Flink" href="../flink.html"/>
+        </menu>
+    </body>
+</project>
\ No newline at end of file
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/1000twitterids.txt b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/1000twitterids.txt
new file mode 100644
index 0000000..0590b9d
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/1000twitterids.txt
@@ -0,0 +1,1000 @@
+twitter:3424266646
+twitter:3277467241
+twitter:3244517214
+twitter:29953647
+twitter:63818319
+twitter:1528436754
+twitter:405580894
+twitter:322778026
+twitter:172382176
+twitter:633076833
+twitter:703735608
+twitter:2347223440
+twitter:2907929487
+twitter:950240089
+twitter:1418546592
+twitter:3318418717
+twitter:2848958704
+twitter:1120797264
+twitter:933623324
+twitter:2977700375
+twitter:328204518
+twitter:585131136
+twitter:2868789793
+twitter:158347647
+twitter:2915413161
+twitter:2217367263
+twitter:2534019247
+twitter:3033565239
+twitter:377379801
+twitter:2525341814
+twitter:3123827524
+twitter:1840932523
+twitter:3307643975
+twitter:3301777832
+twitter:961987748
+twitter:3205632255
+twitter:2799469322
+twitter:17730681
+twitter:1495242662
+twitter:1909516123
+twitter:263933760
+twitter:312651511
+twitter:2479527469
+twitter:2357151036
+twitter:346433828
+twitter:44801893
+twitter:1049697306
+twitter:2779673194
+twitter:18323141
+twitter:2172488902
+twitter:2373431930
+twitter:1038322550
+twitter:2946211549
+twitter:2911057543
+twitter:1186036284
+twitter:2878076317
+twitter:1312950464
+twitter:57323685
+twitter:32929857
+twitter:301933631
+twitter:2852217152
+twitter:330422649
+twitter:98470876
+twitter:933125156
+twitter:3237125761
+twitter:914882005
+twitter:1560239652
+twitter:900444860
+twitter:402918702
+twitter:1820690166
+twitter:3074359086
+twitter:353183684
+twitter:528544881
+twitter:1881638161
+twitter:2751762993
+twitter:3161315692
+twitter:3305680079
+twitter:1721613488
+twitter:513068659
+twitter:627186234
+twitter:3203648416
+twitter:1541163325
+twitter:1882043502
+twitter:29071727
+twitter:610104090
+twitter:2819781014
+twitter:2909115204
+twitter:213886397
+twitter:3249385591
+twitter:3086875073
+twitter:87040031
+twitter:2202487475
+twitter:334896132
+twitter:49163181
+twitter:3433984816
+twitter:543969362
+twitter:489445461
+twitter:855051894
+twitter:2792040175
+twitter:117051455
+twitter:438599410
+twitter:1387329846
+twitter:711595782
+twitter:3230662766
+twitter:2766672269
+twitter:2926781875
+twitter:863203928
+twitter:517199566
+twitter:201645935
+twitter:1555939147
+twitter:2943152669
+twitter:1324775431
+twitter:400234897
+twitter:2347416842
+twitter:1558112510
+twitter:474415350
+twitter:2153710970
+twitter:1408335014
+twitter:3633713483
+twitter:3166021013
+twitter:3530993294
+twitter:332598229
+twitter:308252069
+twitter:3317826986
+twitter:572175644
+twitter:1718271572
+twitter:2869090090
+twitter:23725109
+twitter:1926137280
+twitter:1486830500
+twitter:743080386
+twitter:3250479720
+twitter:2560441544
+twitter:2715649872
+twitter:287089153
+twitter:18761334
+twitter:2305577745
+twitter:724860668
+twitter:193306049
+twitter:2615761979
+twitter:2463299598
+twitter:1436916012
+twitter:919019185
+twitter:90502449
+twitter:50689522
+twitter:1383774679
+twitter:612784850
+twitter:410319975
+twitter:833440153
+twitter:442322844
+twitter:2181167094
+twitter:94012832
+twitter:112748352
+twitter:1474618075
+twitter:158262669
+twitter:2391506308
+twitter:882502026
+twitter:2693660146
+twitter:2971933908
+twitter:55271184
+twitter:2287356556
+twitter:2895756090
+twitter:407147132
+twitter:3262181
+twitter:313317193
+twitter:2729137002
+twitter:2939122360
+twitter:2751601568
+twitter:1215082350
+twitter:124866576
+twitter:274292311
+twitter:3310301042
+twitter:95407473
+twitter:24993769
+twitter:1342908648
+twitter:1805339413
+twitter:3118252036
+twitter:893269387
+twitter:1481149014
+twitter:463288019
+twitter:75008083
+twitter:2895489727
+twitter:965493739
+twitter:278637248
+twitter:1937513246
+twitter:422218268
+twitter:3320995462
+twitter:78682286
+twitter:2777069098
+twitter:2909553730
+twitter:2914338670
+twitter:1251667531
+twitter:2764034755
+twitter:532659717
+twitter:269002510
+twitter:29373713
+twitter:358075450
+twitter:633880614
+twitter:200374379
+twitter:141628294
+twitter:1513028977
+twitter:116798791
+twitter:2937455354
+twitter:246194623
+twitter:793925970
+twitter:115594167
+twitter:82463176
+twitter:324774974
+twitter:185844856
+twitter:2462295999
+twitter:3555105016
+twitter:1029169117
+twitter:2689309484
+twitter:1587145976
+twitter:1607241271
+twitter:3032276402
+twitter:183916933
+twitter:63766245
+twitter:151217255
+twitter:2781098109
+twitter:252081559
+twitter:1608788256
+twitter:41984573
+twitter:1896587353
+twitter:40136999
+twitter:295505814
+twitter:384867933
+twitter:116947371
+twitter:255703939
+twitter:2687800732
+twitter:76543916
+twitter:881649782
+twitter:2765729924
+twitter:1715695669
+twitter:1965383022
+twitter:2888214228
+twitter:21820514
+twitter:1727966414
+twitter:2581992818
+twitter:103999565
+twitter:741018846
+twitter:446792386
+twitter:2568989424
+twitter:2780674777
+twitter:465934916
+twitter:3378294885
+twitter:2885604327
+twitter:3336273419
+twitter:130742941
+twitter:2327629099
+twitter:1103818104
+twitter:3050036073
+twitter:2882456842
+twitter:2702914248
+twitter:2153674818
+twitter:132825659
+twitter:289758699
+twitter:2995946100
+twitter:3027449217
+twitter:2708029160
+twitter:1529367002
+twitter:608170333
+twitter:140446819
+twitter:2790688993
+twitter:1597308192
+twitter:14462028
+twitter:104062608
+twitter:370274893
+twitter:356145607
+twitter:566542629
+twitter:112587243
+twitter:39372070
+twitter:146853060
+twitter:2440984657
+twitter:3074554539
+twitter:204701034
+twitter:887623447
+twitter:1971521630
+twitter:2457208175
+twitter:466113358
+twitter:1574643830
+twitter:1465533884
+twitter:2500404589
+twitter:1633154150
+twitter:1349117870
+twitter:1658071267
+twitter:593022891
+twitter:3094177813
+twitter:1304672510
+twitter:3385525697
+twitter:2916225552
+twitter:2759773715
+twitter:1369215552
+twitter:1058390078
+twitter:2532850321
+twitter:351483656
+twitter:1902796704
+twitter:113000738
+twitter:2241245557
+twitter:2416606754
+twitter:408729540
+twitter:2530294556
+twitter:2936808249
+twitter:3138999692
+twitter:2679987883
+twitter:1448537377
+twitter:2524773906
+twitter:942079406
+twitter:2217584389
+twitter:3059427504
+twitter:3028507725
+twitter:632766658
+twitter:3302663431
+twitter:2914832897
+twitter:93487101
+twitter:2786054379
+twitter:1339647769
+twitter:531402307
+twitter:402066474
+twitter:337936675
+twitter:2760568625
+twitter:1385916396
+twitter:2595560922
+twitter:421910477
+twitter:1713100813
+twitter:352016040
+twitter:415247994
+twitter:1883606209
+twitter:2974994111
+twitter:1118022211
+twitter:3096979637
+twitter:711889867
+twitter:262890561
+twitter:233810062
+twitter:1877177168
+twitter:964106670
+twitter:164985413
+twitter:2920420361
+twitter:318936782
+twitter:3289826764
+twitter:145873735
+twitter:2523059919
+twitter:2409896179
+twitter:2292047201
+twitter:285674825
+twitter:2765549780
+twitter:2359541905
+twitter:2419103894
+twitter:358884588
+twitter:206231205
+twitter:136500778
+twitter:1397885138
+twitter:2625422097
+twitter:2524578002
+twitter:604278657
+twitter:2625634867
+twitter:73168019
+twitter:407448958
+twitter:189276174
+twitter:2507896925
+twitter:80880449
+twitter:520177827
+twitter:418469102
+twitter:2925075456
+twitter:615730636
+twitter:2995998941
+twitter:2697270934
+twitter:497135011
+twitter:2944598402
+twitter:428706893
+twitter:1345291712
+twitter:388751708
+twitter:130092079
+twitter:2984741882
+twitter:1047514436
+twitter:15927135
+twitter:2884357840
+twitter:294362779
+twitter:2870985800
+twitter:1720400449
+twitter:130027314
+twitter:2970518577
+twitter:240923858
+twitter:1613498838
+twitter:708321211
+twitter:1403382426
+twitter:2602186970
+twitter:1596855998
+twitter:280062526
+twitter:2716454552
+twitter:268720451
+twitter:2869044811
+twitter:1911762488
+twitter:392373280
+twitter:2151082712
+twitter:2770919004
+twitter:231541900
+twitter:60122778
+twitter:390006102
+twitter:240167506
+twitter:1558314660
+twitter:221608257
+twitter:852829933
+twitter:461669243
+twitter:239778483
+twitter:502146157
+twitter:1471963970
+twitter:276426707
+twitter:2336546150
+twitter:323595235
+twitter:128670043
+twitter:1308641714
+twitter:1411112756
+twitter:3011727217
+twitter:3082006921
+twitter:450537474
+twitter:2673101407
+twitter:2416030447
+twitter:51952627
+twitter:708057486
+twitter:833620748
+twitter:3024957797
+twitter:2147572362
+twitter:1712467098
+twitter:2899300501
+twitter:1348351772
+twitter:2923114629
+twitter:2779232814
+twitter:21306308
+twitter:1466314507
+twitter:1224588289
+twitter:81307783
+twitter:42717316
+twitter:315972617
+twitter:434649827
+twitter:105839296
+twitter:366063496
+twitter:34045892
+twitter:3076447389
+twitter:92437198
+twitter:3124335006
+twitter:1444393410
+twitter:351737762
+twitter:1919360383
+twitter:2836048345
+twitter:1670939112
+twitter:722140159
+twitter:92939425
+twitter:2932728756
+twitter:2831872033
+twitter:1354255123
+twitter:1689738186
+twitter:463578260
+twitter:2881582438
+twitter:912252510
+twitter:3226221887
+twitter:390827200
+twitter:269169237
+twitter:1450007192
+twitter:2735984326
+twitter:3029836305
+twitter:28291382
+twitter:785668627
+twitter:567287970
+twitter:1480004420
+twitter:131927864
+twitter:2958631308
+twitter:488490020
+twitter:2603422688
+twitter:3186614985
+twitter:177373618
+twitter:2466506329
+twitter:2651294251
+twitter:3367170684
+twitter:2673870882
+twitter:369098635
+twitter:242011326
+twitter:18099277
+twitter:1922210574
+twitter:3093762445
+twitter:470634878
+twitter:1674607392
+twitter:2920526283
+twitter:3261677580
+twitter:2192187078
+twitter:485599960
+twitter:1854850729
+twitter:95198467
+twitter:2228217740
+twitter:2171528344
+twitter:2957461230
+twitter:226615737
+twitter:1624183567
+twitter:158597677
+twitter:2909224690
+twitter:19278114
+twitter:2488284258
+twitter:2777071149
+twitter:1598064697
+twitter:2740691127
+twitter:3100908480
+twitter:1147010126
+twitter:2741161553
+twitter:439971668
+twitter:3247227273
+twitter:2884261062
+twitter:3127250575
+twitter:2942021278
+twitter:539428196
+twitter:409599986
+twitter:3161801331
+twitter:2328613860
+twitter:1903013437
+twitter:313082004
+twitter:2580495721
+twitter:209464435
+twitter:600172085
+twitter:339541217
+twitter:62219810
+twitter:583287316
+twitter:295891933
+twitter:561683767
+twitter:229192352
+twitter:1357869918
+twitter:235438136
+twitter:1599249169
+twitter:583879210
+twitter:507744802
+twitter:1696336261
+twitter:2323537206
+twitter:36882220
+twitter:541528426
+twitter:956202559
+twitter:387936537
+twitter:211658842
+twitter:2685186010
+twitter:2581656488
+twitter:391154378
+twitter:122932105
+twitter:409764153
+twitter:129737967
+twitter:2848806360
+twitter:3054860719
+twitter:372199585
+twitter:2316121597
+twitter:703345746
+twitter:3335505287
+twitter:2466151422
+twitter:380038166
+twitter:420561214
+twitter:2977085351
+twitter:110955327
+twitter:3004295886
+twitter:2362857361
+twitter:3053844460
+twitter:3182081552
+twitter:324208260
+twitter:2571790321
+twitter:1061498868
+twitter:2187395299
+twitter:2187482779
+twitter:3096652530
+twitter:2538239672
+twitter:3809634552
+twitter:2306848839
+twitter:1544061547
+twitter:151075965
+twitter:3250238556
+twitter:16157689
+twitter:1692663644
+twitter:1356000732
+twitter:436774994
+twitter:45503055
+twitter:1086037316
+twitter:2798297775
+twitter:2923485772
+twitter:58731726
+twitter:211816170
+twitter:885013716
+twitter:2608529078
+twitter:2954917057
+twitter:2271021600
+twitter:173743066
+twitter:451543575
+twitter:3219728436
+twitter:399824828
+twitter:2464688153
+twitter:2541069631
+twitter:1522892262
+twitter:3167829845
+twitter:944851321
+twitter:2471474509
+twitter:68073858
+twitter:1496221376
+twitter:13979882
+twitter:2218792189
+twitter:302123873
+twitter:2845915546
+twitter:431402814
+twitter:1364254945
+twitter:2711277666
+twitter:2766696876
+twitter:2495441323
+twitter:2844317433
+twitter:138009079
+twitter:2578631100
+twitter:478167529
+twitter:1222728360
+twitter:1323688411
+twitter:2883066187
+twitter:2443554697
+twitter:411631689
+twitter:68537682
+twitter:1027019269
+twitter:1660752493
+twitter:987324488
+twitter:2764106926
+twitter:2184511674
+twitter:103419315
+twitter:2310456424
+twitter:1572938088
+twitter:2554895281
+twitter:34138105
+twitter:2942100621
+twitter:160517898
+twitter:285075974
+twitter:2260805169
+twitter:19390498
+twitter:301696842
+twitter:2588239985
+twitter:2886588596
+twitter:2962622367
+twitter:1867897483
+twitter:2827053488
+twitter:1447767319
+twitter:2924491293
+twitter:167327096
+twitter:3309592402
+twitter:2795575638
+twitter:578758971
+twitter:2888665561
+twitter:30542348
+twitter:1437049609
+twitter:2242541566
+twitter:74354017
+twitter:58900854
+twitter:2159055031
+twitter:246517688
+twitter:2916873012
+twitter:1110055280
+twitter:562430843
+twitter:761797794
+twitter:1648208552
+twitter:301483343
+twitter:2896842048
+twitter:522103295
+twitter:1578517986
+twitter:2659610776
+twitter:2890560429
+twitter:1427665578
+twitter:268363160
+twitter:563709041
+twitter:2172300002
+twitter:2791262431
+twitter:3039809351
+twitter:2914940301
+twitter:2746560353
+twitter:2892191616
+twitter:71596845
+twitter:233770184
+twitter:1530949130
+twitter:105906110
+twitter:755347622
+twitter:490836906
+twitter:357603454
+twitter:324517203
+twitter:2835402315
+twitter:3285479894
+twitter:86368327
+twitter:238219970
+twitter:3153173945
+twitter:2732361234
+twitter:2357626327
+twitter:346602505
+twitter:13732632
+twitter:44055265
+twitter:2998032219
+twitter:482072312
+twitter:1721073866
+twitter:1386781034
+twitter:168194206
+twitter:1213443144
+twitter:181296114
+twitter:942598400
+twitter:2955577216
+twitter:582056669
+twitter:747540468
+twitter:2371722140
+twitter:360824004
+twitter:3023711736
+twitter:207032580
+twitter:2748107976
+twitter:464428175
+twitter:3150849096
+twitter:85450014
+twitter:2840066340
+twitter:2287819200
+twitter:240931426
+twitter:553606800
+twitter:397876544
+twitter:2195298230
+twitter:2601812005
+twitter:3013344739
+twitter:17599363
+twitter:1572639314
+twitter:3377673407
+twitter:303420278
+twitter:2811879995
+twitter:526860891
+twitter:346333874
+twitter:113568311
+twitter:705488304
+twitter:3238867619
+twitter:333772149
+twitter:373309716
+twitter:300472003
+twitter:3223424681
+twitter:2895699896
+twitter:3241119570
+twitter:1147453440
+twitter:3135402609
+twitter:521763744
+twitter:2702966971
+twitter:2878317616
+twitter:845031697
+twitter:2855454471
+twitter:3051902539
+twitter:482306439
+twitter:129173738
+twitter:306572138
+twitter:2941951538
+twitter:762707233
+twitter:2732608168
+twitter:1228456939
+twitter:246020724
+twitter:1920607602
+twitter:14434245
+twitter:1254943537
+twitter:1520746602
+twitter:150745124
+twitter:1350160351
+twitter:38707222
+twitter:267766858
+twitter:2992121760
+twitter:712666764
+twitter:983036864
+twitter:289490939
+twitter:269797384
+twitter:100215048
+twitter:3099557245
+twitter:2339741570
+twitter:306005146
+twitter:1182227460
+twitter:288235870
+twitter:1412832260
+twitter:455190443
+twitter:489912183
+twitter:448994061
+twitter:2944595072
+twitter:2453094914
+twitter:2899434206
+twitter:59288818
+twitter:2824706688
+twitter:423363992
+twitter:972850482
+twitter:997868714
+twitter:1203750733
+twitter:176147179
+twitter:115110596
+twitter:2978397615
+twitter:2528946267
+twitter:620180433
+twitter:365949935
+twitter:110609853
+twitter:1533494268
+twitter:2723839166
+twitter:34186887
+twitter:2864430424
+twitter:76942977
+twitter:361086733
+twitter:2724200587
+twitter:635206139
+twitter:2757801421
+twitter:19651443
+twitter:3364322949
+twitter:2770576744
+twitter:2168612560
+twitter:764020297
+twitter:2558268513
+twitter:2855384901
+twitter:1881414907
+twitter:2502212139
+twitter:3250037586
+twitter:2525185944
+twitter:591375982
+twitter:707911211
+twitter:3025041666
+twitter:19785599
+twitter:2311172950
+twitter:922817815
+twitter:739363530
+twitter:2812894393
+twitter:2496283986
+twitter:206162815
+twitter:590916342
+twitter:354053245
+twitter:2735195854
+twitter:2788759128
+twitter:3510947235
+twitter:3490740532
+twitter:2920847304
+twitter:2681444558
+twitter:2856805755
+twitter:3103899682
+twitter:145893832
+twitter:3065663910
+twitter:2736009516
+twitter:2835226230
+twitter:1590913771
+twitter:2700889555
+twitter:2221272164
+twitter:109780161
+twitter:700221218
+twitter:541753453
+twitter:126575915
+twitter:274336817
+twitter:2498172455
+twitter:2809515630
+twitter:2588774684
+twitter:296734891
+twitter:2212410182
+twitter:243027454
+twitter:1336526904
+twitter:397062736
+twitter:449331876
+twitter:30619307
+twitter:2310483811
+twitter:2437586509
+twitter:191710730
+twitter:1084185378
+twitter:2831486681
+twitter:1606477879
+twitter:969600636
+twitter:529783214
+twitter:2928131586
+twitter:190041293
+twitter:2967031274
+twitter:2165962781
+twitter:376501355
+twitter:284137985
+twitter:266863824
+twitter:407944074
+twitter:108456036
+twitter:1641294422
+twitter:900733706
+twitter:1063071450
+twitter:1682722328
+twitter:341419520
+twitter:1644293778
+twitter:2245151467
+twitter:511176989
+twitter:241922669
+twitter:3388315624
+twitter:1909431145
+twitter:2223820028
+twitter:600581315
+twitter:1723555076
+twitter:2748445313
+twitter:561211823
+twitter:561022931
+twitter:2751429993
+twitter:2714908343
+twitter:16165257
+twitter:524623359
+twitter:306741266
+twitter:469994381
+twitter:2561892084
+twitter:998802661
+twitter:1492924374
+twitter:789039140
+twitter:210150093
+twitter:817544820
+twitter:35740178
+twitter:326162841
+twitter:1447331628
+twitter:17493441
+twitter:2874693608
+twitter:965027312
+twitter:261936985
+twitter:510564259
+twitter:728031187
+twitter:164696234
+twitter:2204519310
+twitter:1626241164
+twitter:1024940588
+twitter:221486613
+twitter:571084565
+twitter:3029264508
+twitter:221716563
+twitter:2211417135
+twitter:499972359
+twitter:1565989165
+twitter:2436927208
+twitter:381029291
+twitter:2730580620
+twitter:3436438413
+twitter:2466014604
+twitter:538990742
+twitter:2935470687
+twitter:1162845468
+twitter:468108082
+twitter:2383897542
+twitter:2542119658
+twitter:1962281514
+twitter:171235080
+twitter:536915535100125185
+twitter:2841076618
+twitter:3006098500
+twitter:1057158554
+twitter:3245676721
+twitter:251087536
+twitter:3082811549
+twitter:281785349
+twitter:1674871100
+twitter:1898659951
+twitter:1414854156
+twitter:428693618
+twitter:2385953101
+twitter:2281213477
+twitter:2786368894
+twitter:2253203998
+twitter:357277727
+twitter:1358707970
+twitter:545186198
+twitter:3033613587
+twitter:107121821
+twitter:595965259
+twitter:583894637
+twitter:1306698787
+twitter:442262869
+twitter:2868353318
+twitter:1908436844
+twitter:271982042
+twitter:495202171
+twitter:251586884
+twitter:3151032974
+twitter:2213682568
+twitter:1203133039
+twitter:193128957
+twitter:597407120
+twitter:2781102086
+twitter:369254505
+twitter:62831036
+twitter:2328734640
+twitter:2579064082
+twitter:3271313827
+twitter:2880366619
+twitter:2323026113
+twitter:446380518
+twitter:245418139
+twitter:261211664
+twitter:1893329208
+twitter:3406596309
+twitter:584967077
+twitter:1708862304
+twitter:388961426
+twitter:2421535351
+twitter:2194375668
+twitter:2790313673
+twitter:2728894977
+twitter:2829174824
+twitter:784541196
+twitter:959902393
+twitter:249705367
+twitter:1677679309
+twitter:2825975175
+twitter:1305768366
+twitter:373475046
+twitter:785362464
+twitter:419607671
+twitter:61031675
+twitter:3854236343
+twitter:714603248
+twitter:1301447720
+twitter:827660912
+twitter:2383764684
+twitter:3180084906
+twitter:3265558124
+twitter:608536922
+twitter:238943561
\ No newline at end of file
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/FlinkTwitterFollowingPipelineFollowersIT.conf b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/FlinkTwitterFollowingPipelineFollowersIT.conf
new file mode 100644
index 0000000..0d045c4
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/FlinkTwitterFollowingPipelineFollowersIT.conf
@@ -0,0 +1,37 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+include "../../../../../twitter.conf"
+source {
+  fields = ["ID"]
+  scheme = file
+  path = "target/test-classes"
+  readerPath = "asf.txt"
+}
+destination {
+  fields = ["DOC"]
+  scheme = file
+  path = "target/test-classes"
+  writerPath = "FlinkTwitterFollowingPipelineFollowersIT"
+}
+twitter {
+  endpoint = followers
+  ids_only = true
+  max_items = 5000
+}
+providerWaitMs = 1000
+local = true
+test = true
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/FlinkTwitterFollowingPipelineFriendsIT.conf b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/FlinkTwitterFollowingPipelineFriendsIT.conf
new file mode 100644
index 0000000..cb646e6
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/FlinkTwitterFollowingPipelineFriendsIT.conf
@@ -0,0 +1,36 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+include "../../../../../twitter.conf"
+source {
+  fields = ["ID"]
+  scheme = file
+  path = "target/test-classes"
+  readerPath = "asf.txt"
+}
+destination {
+  fields = ["DOC"]
+  scheme = file
+  path = "target/test-classes"
+  writerPath = "FlinkTwitterFollowingPipelineFriendsIT"
+}
+twitter {
+  endpoint = friends
+  ids_only = true
+}
+providerWaitMs = 1000
+local = true
+test = true
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/FlinkTwitterPostsPipelineIT.conf b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/FlinkTwitterPostsPipelineIT.conf
new file mode 100644
index 0000000..567253d
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/FlinkTwitterPostsPipelineIT.conf
@@ -0,0 +1,32 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+include "../../../../../twitter.conf"
+source {
+  fields = ["ID"]
+  scheme = file
+  path = "target/test-classes"
+  readerPath = "asf.txt"
+}
+destination {
+  fields = ["DOC"]
+  scheme = file
+  path = "target/test-classes"
+  writerPath = "FlinkTwitterPostsPipelineIT"
+}
+providerWaitMs = 1000
+local = true
+test = true
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/FlinkTwitterSpritzerPipelineIT.conf b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/FlinkTwitterSpritzerPipelineIT.conf
new file mode 100644
index 0000000..354beea
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/FlinkTwitterSpritzerPipelineIT.conf
@@ -0,0 +1,32 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+include "../../../../../twitter.conf"
+destination {
+  fields = ["DOC"]
+  scheme = file
+  path = "target/test-classes"
+  writerPath = "FlinkTwitterSpritzerPipelineIT"
+}
+twitter {
+  endpoint = sample
+  track = [
+    "data"
+  ]
+}
+providerWaitMs = 1000
+local = true
+test = true
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/FlinkTwitterUserInformationPipelineIT.conf b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/FlinkTwitterUserInformationPipelineIT.conf
new file mode 100644
index 0000000..22d24cc
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/FlinkTwitterUserInformationPipelineIT.conf
@@ -0,0 +1,32 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+include "../../../../../twitter.conf"
+source {
+  fields = ["ID"]
+  scheme = file
+  path = "target/test-classes"
+  readerPath = "1000twitterids.txt"
+}
+destination {
+  fields = ["DOC"]
+  scheme = file
+  path = "target/test-classes"
+  writerPath = "FlinkTwitterUserInformationPipelineIT"
+}
+providerWaitMs = 1000
+local = true
+test = true
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/asf.txt b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/asf.txt
new file mode 100644
index 0000000..c2b1ea1
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/resources/asf.txt
@@ -0,0 +1 @@
+twitter:18055613
\ No newline at end of file
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/scala/org/apache/streams/examples/flink/twitter/test/FlinkTwitterFollowingPipelineFollowersIT.scala b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/scala/org/apache/streams/examples/flink/twitter/test/FlinkTwitterFollowingPipelineFollowersIT.scala
new file mode 100644
index 0000000..be22b82
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/scala/org/apache/streams/examples/flink/twitter/test/FlinkTwitterFollowingPipelineFollowersIT.scala
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.examples.flink.twitter.test
+
+import java.io.File
+import java.nio.file.{Files, Paths}
+
+import com.typesafe.config.{Config, ConfigFactory, ConfigParseOptions}
+import org.apache.streams.config.{ComponentConfigurator, StreamsConfiguration, StreamsConfigurator}
+import org.apache.streams.examples.flink.twitter.TwitterFollowingPipelineConfiguration
+import org.apache.streams.examples.flink.twitter.collection.FlinkTwitterFollowingPipeline
+import org.scalatest.FlatSpec
+import org.scalatest.concurrent.Eventually._
+import org.scalatest.time.SpanSugar._
+import org.slf4j.{Logger, LoggerFactory}
+import org.testng.annotations.Test
+
+import scala.io.Source
+
+/**
+  * FlinkTwitterFollowingPipelineFollowersIT is an integration test for FlinkTwitterFollowingPipeline.
+  */
+class FlinkTwitterFollowingPipelineFollowersIT extends FlatSpec {
+
+  private val LOGGER: Logger = LoggerFactory.getLogger(classOf[FlinkTwitterFollowingPipelineFollowersIT])
+
+  // brings the companion object's setup(...) helper into scope
+  import FlinkTwitterFollowingPipeline._
+
+  /**
+    * Runs FlinkTwitterFollowingPipeline with the configuration at
+    * target/test-classes/FlinkTwitterFollowingPipelineFollowersIT.conf,
+    * then asserts the destination file exists and holds more than 4000 lines.
+    */
+  @Test
+  def flinkTwitterFollowersPipelineFollowersIT = {
+
+    // layer the test resource config over the default/reference config
+    val reference: Config = ConfigFactory.load()
+    val conf_file: File = new File("target/test-classes/FlinkTwitterFollowingPipelineFollowersIT.conf")
+    assert(conf_file.exists())
+    // setAllowMissing(false) fails fast if the conf file is unreadable
+    val testResourceConfig: Config = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
+
+    val typesafe: Config = testResourceConfig.withFallback(reference).resolve()
+    val streams: StreamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe)
+    val testConfig = new ComponentConfigurator(classOf[TwitterFollowingPipelineConfiguration]).detectConfiguration(typesafe)
+
+    setup(testConfig)
+
+    // run the pipeline on its own thread and wait for it to complete
+    val job = new FlinkTwitterFollowingPipeline(config = testConfig)
+    val jobThread = new Thread(job)
+    jobThread.start
+    jobThread.join
+
+    // poll for up to 60s: output file must exist with > 4000 collected records
+    eventually (timeout(60 seconds), interval(1 seconds)) {
+      assert(Files.exists(Paths.get(testConfig.getDestination.getPath + "/" + testConfig.getDestination.getWriterPath)))
+      assert(
+        Source.fromFile(testConfig.getDestination.getPath + "/" + testConfig.getDestination.getWriterPath, "UTF-8").getLines.size
+          > 4000)
+    }
+
+  }
+
+}
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/scala/org/apache/streams/examples/flink/twitter/test/FlinkTwitterFollowingPipelineFriendsIT.scala b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/scala/org/apache/streams/examples/flink/twitter/test/FlinkTwitterFollowingPipelineFriendsIT.scala
new file mode 100644
index 0000000..9829ebc
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/scala/org/apache/streams/examples/flink/twitter/test/FlinkTwitterFollowingPipelineFriendsIT.scala
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.examples.flink.twitter.test
+
+import java.io.File
+import java.nio.file.{Files, Paths}
+
+import com.typesafe.config.{Config, ConfigFactory, ConfigParseOptions}
+import org.apache.streams.config.{ComponentConfigurator, StreamsConfiguration, StreamsConfigurator}
+import org.apache.streams.examples.flink.twitter.TwitterFollowingPipelineConfiguration
+import org.apache.streams.examples.flink.twitter.collection.FlinkTwitterFollowingPipeline
+import org.scalatest.FlatSpec
+import org.scalatest.concurrent.Eventually._
+import org.scalatest.time.SpanSugar._
+import org.slf4j.{Logger, LoggerFactory}
+import org.testng.annotations.Test
+
+import scala.io.Source
+
+/**
+  * FlinkTwitterFollowingPipelineFriendsIT is an integration test for FlinkTwitterFollowingPipeline
+  * collecting the 'friends' endpoint.
+  */
+class FlinkTwitterFollowingPipelineFriendsIT extends FlatSpec {
+
+  private val LOGGER: Logger = LoggerFactory.getLogger(classOf[FlinkTwitterFollowingPipelineFriendsIT])
+
+  // brings the companion object's setup(...) helper into scope
+  import FlinkTwitterFollowingPipeline._
+
+  /**
+    * Runs FlinkTwitterFollowingPipeline with the configuration at
+    * target/test-classes/FlinkTwitterFollowingPipelineFriendsIT.conf,
+    * then asserts the destination file exists and holds more than 90 lines.
+    */
+  @Test
+  def flinkTwitterFollowersPipelineFriendsIT = {
+
+    // layer the test resource config over the default/reference config
+    val reference: Config = ConfigFactory.load()
+    val conf_file: File = new File("target/test-classes/FlinkTwitterFollowingPipelineFriendsIT.conf")
+    assert(conf_file.exists())
+    // setAllowMissing(false) fails fast if the conf file is unreadable
+    val testResourceConfig: Config = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
+
+    val typesafe: Config = testResourceConfig.withFallback(reference).resolve()
+    val streams: StreamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe)
+    val testConfig = new ComponentConfigurator(classOf[TwitterFollowingPipelineConfiguration]).detectConfiguration(typesafe)
+
+    setup(testConfig)
+
+    // run the pipeline on its own thread and wait for it to complete
+    val job = new FlinkTwitterFollowingPipeline(config = testConfig)
+    val jobThread = new Thread(job)
+    jobThread.start
+    jobThread.join
+
+    // poll for up to 60s: output file must exist with > 90 collected records
+    eventually (timeout(60 seconds), interval(1 seconds)) {
+      assert(Files.exists(Paths.get(testConfig.getDestination.getPath + "/" + testConfig.getDestination.getWriterPath)))
+      assert(
+        Source.fromFile(testConfig.getDestination.getPath + "/" + testConfig.getDestination.getWriterPath, "UTF-8").getLines.size
+          > 90)
+    }
+
+  }
+
+}
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/scala/org/apache/streams/examples/flink/twitter/test/FlinkTwitterPostsPipelineIT.scala b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/scala/org/apache/streams/examples/flink/twitter/test/FlinkTwitterPostsPipelineIT.scala
new file mode 100644
index 0000000..987e82d
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/scala/org/apache/streams/examples/flink/twitter/test/FlinkTwitterPostsPipelineIT.scala
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.examples.flink.twitter.test
+
+import java.io.File
+import java.nio.file.{Files, Paths}
+
+import com.typesafe.config.{Config, ConfigFactory, ConfigParseOptions}
+import org.apache.streams.config.{ComponentConfigurator, StreamsConfiguration, StreamsConfigurator}
+import org.apache.streams.examples.flink.twitter.TwitterPostsPipelineConfiguration
+import org.apache.streams.examples.flink.twitter.collection.FlinkTwitterPostsPipeline
+import org.scalatest.FlatSpec
+import org.scalatest.concurrent.Eventually._
+import org.scalatest.time.SpanSugar._
+import org.slf4j.{Logger, LoggerFactory}
+import org.testng.annotations.Test
+
+import scala.io.Source
+
+/**
+  * FlinkTwitterPostsPipelineIT is an integration test for FlinkTwitterPostsPipeline.
+  */
+class FlinkTwitterPostsPipelineIT extends FlatSpec {
+
+  private val LOGGER: Logger = LoggerFactory.getLogger(classOf[FlinkTwitterPostsPipelineIT])
+
+  // brings the companion object's setup(...) helper into scope
+  import FlinkTwitterPostsPipeline._
+
+  /**
+    * Runs FlinkTwitterPostsPipeline with the configuration at
+    * target/test-classes/FlinkTwitterPostsPipelineIT.conf,
+    * then asserts the destination file exists and holds at least 200 lines.
+    */
+  @Test
+  def flinkTwitterPostsPipelineIT = {
+
+    // layer the test resource config over the default/reference config
+    val reference: Config = ConfigFactory.load()
+    val conf_file: File = new File("target/test-classes/FlinkTwitterPostsPipelineIT.conf")
+    assert(conf_file.exists())
+    // setAllowMissing(false) fails fast if the conf file is unreadable
+    val testResourceConfig: Config = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
+
+    val typesafe: Config = testResourceConfig.withFallback(reference).resolve()
+    val streams: StreamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe)
+    val testConfig = new ComponentConfigurator(classOf[TwitterPostsPipelineConfiguration]).detectConfiguration(typesafe)
+
+    setup(testConfig)
+
+    // run the pipeline on its own thread and wait for it to complete
+    val job = new FlinkTwitterPostsPipeline(config = testConfig)
+    val jobThread = new Thread(job)
+    jobThread.start
+    jobThread.join
+
+    // poll for up to 30s: output file must exist with >= 200 collected records
+    eventually (timeout(30 seconds), interval(1 seconds)) {
+      assert(Files.exists(Paths.get(testConfig.getDestination.getPath + "/" + testConfig.getDestination.getWriterPath)))
+      assert(
+        Source.fromFile(testConfig.getDestination.getPath + "/" + testConfig.getDestination.getWriterPath, "UTF-8").getLines.size
+          >= 200)
+    }
+
+  }
+
+}
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/scala/org/apache/streams/examples/flink/twitter/test/FlinkTwitterSpritzerPipelineIT.scala b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/scala/org/apache/streams/examples/flink/twitter/test/FlinkTwitterSpritzerPipelineIT.scala
new file mode 100644
index 0000000..7570bac
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/scala/org/apache/streams/examples/flink/twitter/test/FlinkTwitterSpritzerPipelineIT.scala
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.examples.flink.twitter.test
+
+import java.io.File
+import java.nio.file.{Files, Paths}
+
+import com.typesafe.config.{Config, ConfigFactory, ConfigParseOptions}
+import org.apache.streams.config.{ComponentConfigurator, StreamsConfiguration, StreamsConfigurator}
+import org.apache.streams.examples.flink.twitter.TwitterSpritzerPipelineConfiguration
+import org.apache.streams.examples.flink.twitter.collection.FlinkTwitterSpritzerPipeline
+import org.scalatest.FlatSpec
+import org.scalatest.concurrent.Eventually._
+import org.scalatest.time.SpanSugar._
+import org.slf4j.{Logger, LoggerFactory}
+import org.testng.annotations.Test
+
+import scala.io.Source
+
+/**
+  * FlinkTwitterSpritzerPipelineIT is an integration test for FlinkTwitterSpritzerPipeline.
+  */
+class FlinkTwitterSpritzerPipelineIT extends FlatSpec {
+
+  private val LOGGER: Logger = LoggerFactory.getLogger(classOf[FlinkTwitterSpritzerPipelineIT])
+
+  // brings the companion object's setup(...) helper into scope
+  import FlinkTwitterSpritzerPipeline._
+
+  /**
+    * Runs FlinkTwitterSpritzerPipeline with the configuration at
+    * target/test-classes/FlinkTwitterSpritzerPipelineIT.conf for up to 30s,
+    * stops the stream, then asserts the destination file exists and holds
+    * at least 10 lines.
+    */
+  @Test
+  def flinkTwitterSpritzerPipelineIT = {
+
+    // layer the test resource config over the default/reference config
+    val reference: Config = ConfigFactory.load()
+    val conf_file: File = new File("target/test-classes/FlinkTwitterSpritzerPipelineIT.conf")
+    assert(conf_file.exists())
+    // setAllowMissing(false) fails fast if the conf file is unreadable
+    val testResourceConfig: Config = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
+
+    val typesafe: Config = testResourceConfig.withFallback(reference).resolve()
+    val streams: StreamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe)
+    val testConfig = new ComponentConfigurator(classOf[TwitterSpritzerPipelineConfiguration]).detectConfiguration(typesafe)
+
+    setup(testConfig)
+
+    // the spritzer stream is unbounded: collect for at most 30s, then stop it
+    val job = new FlinkTwitterSpritzerPipeline(config = testConfig)
+    val jobThread = new Thread(job)
+    jobThread.start
+    jobThread.join(30000)
+    job.stop()
+
+    // poll for up to 60s: output file must exist with >= 10 collected records
+    eventually (timeout(60 seconds), interval(1 seconds)) {
+      assert(Files.exists(Paths.get(testConfig.getDestination.getPath + "/" + testConfig.getDestination.getWriterPath)))
+      assert(
+        Source.fromFile(testConfig.getDestination.getPath + "/" + testConfig.getDestination.getWriterPath, "UTF-8").getLines.size
+          >= 10)
+    }
+
+  }
+
+}
diff --git a/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/scala/org/apache/streams/examples/flink/twitter/test/FlinkTwitterUserInformationPipelineIT.scala b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/scala/org/apache/streams/examples/flink/twitter/test/FlinkTwitterUserInformationPipelineIT.scala
new file mode 100644
index 0000000..ab88d48
--- /dev/null
+++ b/streams-examples/streams-examples-flink/flink-twitter-collection/src/test/scala/org/apache/streams/examples/flink/twitter/test/FlinkTwitterUserInformationPipelineIT.scala
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.examples.flink.twitter.test
+
+import java.io.File
+import java.nio.file.{Files, Paths}
+
+import com.typesafe.config.{Config, ConfigFactory, ConfigParseOptions}
+import org.apache.streams.config.{ComponentConfigurator, StreamsConfiguration, StreamsConfigurator}
+import org.apache.streams.examples.flink.twitter.TwitterUserInformationPipelineConfiguration
+import org.apache.streams.examples.flink.twitter.collection.FlinkTwitterUserInformationPipeline
+import org.scalatest.FlatSpec
+import org.scalatest.concurrent.Eventually._
+import org.scalatest.time.SpanSugar._
+import org.slf4j.{Logger, LoggerFactory}
+import org.testng.annotations.Test
+
+import scala.io.Source
+
+/**
+  * FlinkTwitterUserInformationPipelineIT is an integration test for FlinkTwitterUserInformationPipeline.
+  */
+class FlinkTwitterUserInformationPipelineIT extends FlatSpec {
+
+  private val LOGGER: Logger = LoggerFactory.getLogger(classOf[FlinkTwitterUserInformationPipelineIT])
+
+  // brings the companion object's setup(...) helper into scope
+  import FlinkTwitterUserInformationPipeline._
+
+  /**
+    * Runs FlinkTwitterUserInformationPipeline with the configuration at
+    * target/test-classes/FlinkTwitterUserInformationPipelineIT.conf,
+    * then asserts the destination file exists and holds more than 500 lines.
+    */
+  @Test
+  def flinkTwitterUserInformationPipelineIT = {
+
+    // layer the test resource config over the default/reference config
+    val reference: Config = ConfigFactory.load()
+    val conf_file: File = new File("target/test-classes/FlinkTwitterUserInformationPipelineIT.conf")
+    assert(conf_file.exists())
+    // setAllowMissing(false) fails fast if the conf file is unreadable
+    val testResourceConfig: Config = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
+
+    val typesafe: Config = testResourceConfig.withFallback(reference).resolve()
+    val streams: StreamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe)
+    val testConfig = new ComponentConfigurator(classOf[TwitterUserInformationPipelineConfiguration]).detectConfiguration(typesafe)
+
+    setup(testConfig)
+
+    // run the pipeline on its own thread and wait for it to complete
+    val job = new FlinkTwitterUserInformationPipeline(config = testConfig)
+    val jobThread = new Thread(job)
+    jobThread.start
+    jobThread.join
+
+    // poll for up to 30s: output file must exist with > 500 collected records
+    eventually (timeout(30 seconds), interval(1 seconds)) {
+      assert(Files.exists(Paths.get(testConfig.getDestination.getPath + "/" + testConfig.getDestination.getWriterPath)))
+      assert(
+        Source.fromFile(testConfig.getDestination.getPath + "/" + testConfig.getDestination.getWriterPath, "UTF-8").getLines.size
+          > 500)
+    }
+
+  }
+
+}
diff --git a/streams-examples/streams-examples-flink/pom.xml b/streams-examples/streams-examples-flink/pom.xml
new file mode 100644
index 0000000..c462206
--- /dev/null
+++ b/streams-examples/streams-examples-flink/pom.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <artifactId>streams-examples</artifactId>
+        <groupId>org.apache.streams.examples</groupId>
+        <version>0.5.2-SNAPSHOT</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>streams-examples-flink</artifactId>
+
+    <packaging>pom</packaging>
+    <name>streams-examples-flink</name>
+
+    <description>Contributed examples of use cases for Streams using flink</description>
+
+    <properties>
+
+    </properties>
+
+    <modules>
+        <module>flink-twitter-collection</module>
+    </modules>
+
+</project>
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/README.md b/streams-examples/streams-examples-local/elasticsearch-hdfs/README.md
new file mode 100644
index 0000000..b6653da
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/README.md
@@ -0,0 +1,8 @@
+Apache Streams
+Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
+--------------------------------------------------------------------------------
+
+org.apache.streams:elasticsearch-hdfs
+=====================================
+
+[README.md](src/site/markdown/index.md "README")
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/pom.xml b/streams-examples/streams-examples-local/elasticsearch-hdfs/pom.xml
new file mode 100644
index 0000000..4ed94ae
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/pom.xml
@@ -0,0 +1,322 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <groupId>org.apache.streams.examples</groupId>
+        <artifactId>streams-examples-local</artifactId>
+        <version>0.5.2-SNAPSHOT</version>
+        <relativePath>..</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>streams-elasticsearch-hdfs</artifactId>
+    <name>elasticsearch-hdfs</name>
+
+    <description>Copies documents between elasticsearch and file system using the hdfs persist module.</description>
+
+    <properties>
+        <elasticsearch.version>2.4.6</elasticsearch.version>
+        <lucene.version>5.5.4</lucene.version>
+        <docker.repo>apachestreams</docker.repo>
+        <hdfs.version>2.7.0</hdfs.version>
+    </properties>
+
+    <dependencies>
+        <!-- Test includes -->
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-test-framework</artifactId>
+            <version>${lucene.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-codecs</artifactId>
+            <version>${lucene.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.elasticsearch</groupId>
+            <artifactId>elasticsearch</artifactId>
+            <version>${elasticsearch.version}</version>
+            <type>test-jar</type>
+        </dependency>
+        <dependency>
+            <groupId>org.testng</groupId>
+            <artifactId>testng</artifactId>
+            <version>${testng.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-all</artifactId>
+            <version>1.3</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-core</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.typesafe</groupId>
+            <artifactId>config</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-config</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-util</artifactId>
+            <type>test-jar</type>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-schema-activitystreams</artifactId>
+            <type>test-jar</type>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-runtime-local</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-persist-elasticsearch</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-persist-elasticsearch</artifactId>
+            <type>test-jar</type>
+            <scope>test</scope>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-persist-hdfs</artifactId>
+            <version>${project.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>commons-logging</groupId>
+                    <artifactId>commons-logging</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-hdfs</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-hdfs</artifactId>
+            <version>${hdfs.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>commons-logging</groupId>
+                    <artifactId>commons-logging</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>log4j-over-slf4j</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>jcl-over-slf4j</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>jul-to-slf4j</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-classic</artifactId>
+            <version>${logback.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-core</artifactId>
+            <version>${logback.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-testing</artifactId>
+            <version>${project.version}</version>
+            <scope>test</scope>
+            <type>test-jar</type>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <sourceDirectory>src/main/java</sourceDirectory>
+        <testSourceDirectory>src/test/java</testSourceDirectory>
+        <resources>
+            <resource>
+                <directory>src/main/resources</directory>
+            </resource>
+        </resources>
+        <testResources>
+            <testResource>
+                <directory>src/test/resources</directory>
+            </testResource>
+        </testResources>
+        <plugins>
+            <!-- This binary runs with logback -->
+            <!-- Keep log4j out -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-enforcer-plugin</artifactId>
+                <version>1.3.1</version>
+                <executions>
+                    <execution>
+                        <id>enforce-banned-dependencies</id>
+                        <goals>
+                            <goal>enforce</goal>
+                        </goals>
+                        <configuration>
+                            <rules>
+                                <bannedDependencies>
+                                    <excludes>
+                                        <exclude>org.slf4j:slf4j-log4j12</exclude>
+                                        <exclude>org.slf4j:slf4j-jcl</exclude>
+                                        <exclude>org.slf4j:slf4j-jdk14</exclude>
+                                        <exclude>org.log4j:log4j</exclude>
+                                        <exclude>commons-logging:commons-logging</exclude>
+                                    </excludes>
+                                </bannedDependencies>
+                            </rules>
+                            <fail>true</fail>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.streams.plugins</groupId>
+                <artifactId>streams-plugin-pojo</artifactId>
+                <configuration>
+                    <sourcePaths>
+                        <sourcePath>${project.basedir}/src/main/jsonschema</sourcePath>
+                    </sourcePaths>
+                    <targetDirectory>${project.basedir}/target/generated-sources/pojo</targetDirectory>
+                    <targetPackage>org.apache.streams.examples.elasticsearch</targetPackage>
+                </configuration>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>generate-sources</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.apache.streams</groupId>
+                        <artifactId>streams-persist-elasticsearch</artifactId>
+                        <version>${project.version}</version>
+                    </dependency>
+                    <dependency>
+                        <groupId>org.apache.streams</groupId>
+                        <artifactId>streams-persist-hdfs</artifactId>
+                        <version>${project.version}</version>
+                    </dependency>
+                </dependencies>
+            </plugin>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>build-helper-maven-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>add-source</id>
+                        <phase>generate-sources</phase>
+                        <goals>
+                            <goal>add-source</goal>
+                        </goals>
+                        <configuration>
+                            <sources>
+                                <source>target/generated-sources/pojo</source>
+                            </sources>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-dependency-plugin</artifactId>
+                <configuration>
+                    <includes>**/*.json,**/*.conf</includes>
+                    <outputDirectory>${project.build.directory}/test-classes</outputDirectory>
+                    <includeGroupIds>org.apache.streams</includeGroupIds>
+                    <includeTypes>test-jar</includeTypes>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>test-resource-dependencies</id>
+                        <phase>process-test-resources</phase>
+                        <goals>
+                            <goal>unpack-dependencies</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-failsafe-plugin</artifactId>
+                <configuration>
+                    <!-- Run integration test suite rather than individual tests. -->
+                    <suiteXmlFiles>
+                        <suiteXmlFile>target/test-classes/testng.xml</suiteXmlFile>
+                    </suiteXmlFiles>
+                    <!--<excludes>-->
+                        <!--<exclude>**/*Test.java</exclude>-->
+                        <!--<exclude>**/*Tests.java</exclude>-->
+                    <!--</excludes>-->
+                    <!--<includes>-->
+                        <!--<exclude>**/*IT.java</exclude>-->
+                        <!--<include>**/*ITs.java</include>-->
+                    <!--</includes>-->
+                </configuration>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.apache.maven.surefire</groupId>
+                        <artifactId>surefire-testng</artifactId>
+                        <version>${failsafe.plugin.version}</version>
+                    </dependency>
+                </dependencies>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/src/main/java/org/apache/streams/example/ElasticsearchHdfs.java b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/main/java/org/apache/streams/example/ElasticsearchHdfs.java
new file mode 100644
index 0000000..d2d0869
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/main/java/org/apache/streams/example/ElasticsearchHdfs.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.example;
+
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.core.StreamBuilder;
+import org.apache.streams.elasticsearch.ElasticsearchPersistReader;
+import org.apache.streams.hdfs.WebHdfsPersistWriter;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.local.LocalRuntimeConfiguration;
+import org.apache.streams.local.builders.LocalStreamBuilder;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Copies documents from an elasticsearch index to new-line delimited json on dfs.
+ */
+public class ElasticsearchHdfs implements Runnable {
+
+  public final static String STREAMS_ID = "ElasticsearchHdfs";
+
+  private final static Logger LOGGER = LoggerFactory.getLogger(ElasticsearchHdfs.class);
+
+  ElasticsearchHdfsConfiguration config;
+
+  public ElasticsearchHdfs() {
+    this(new ComponentConfigurator<>(ElasticsearchHdfsConfiguration.class).detectConfiguration(StreamsConfigurator.getConfig()));
+  }
+
+  public ElasticsearchHdfs(ElasticsearchHdfsConfiguration reindex) {
+    this.config = reindex;
+  }
+
+  public static void main(String[] args)
+  {
+    LOGGER.info(StreamsConfigurator.getConfig().toString());
+    ElasticsearchHdfs backup = new ElasticsearchHdfs();
+    new Thread(backup).start();
+  }
+
+  @Override
+  public void run() {
+
+    ElasticsearchPersistReader elasticsearchPersistReader = new ElasticsearchPersistReader(config.getSource());
+    WebHdfsPersistWriter hdfsPersistWriter = new WebHdfsPersistWriter(config.getDestination());
+
+    LocalRuntimeConfiguration localRuntimeConfiguration =
+        StreamsJacksonMapper.getInstance().convertValue(StreamsConfigurator.detectConfiguration(), LocalRuntimeConfiguration.class);
+    StreamBuilder builder = new LocalStreamBuilder(localRuntimeConfiguration);
+
+    builder.newPerpetualStream(ElasticsearchPersistReader.class.getCanonicalName(), elasticsearchPersistReader);
+    builder.addStreamsPersistWriter(WebHdfsPersistWriter.class.getCanonicalName(), hdfsPersistWriter, 1, ElasticsearchPersistReader.class.getCanonicalName());
+    builder.start();
+  }
+}
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/src/main/java/org/apache/streams/example/HdfsElasticsearch.java b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/main/java/org/apache/streams/example/HdfsElasticsearch.java
new file mode 100644
index 0000000..cca8472
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/main/java/org/apache/streams/example/HdfsElasticsearch.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.example;
+
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.core.StreamBuilder;
+import org.apache.streams.elasticsearch.ElasticsearchPersistWriter;
+import org.apache.streams.hdfs.WebHdfsPersistReader;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.local.LocalRuntimeConfiguration;
+import org.apache.streams.local.builders.LocalStreamBuilder;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Copies documents from new-line delimited json on dfs to an elasticsearch index.
+ */
+public class HdfsElasticsearch implements Runnable {
+
+  public final static String STREAMS_ID = "HdfsElasticsearch";
+
+  private final static Logger LOGGER = LoggerFactory.getLogger(HdfsElasticsearch.class);
+
+  HdfsElasticsearchConfiguration config;
+
+  public HdfsElasticsearch() {
+    this(new ComponentConfigurator<>(HdfsElasticsearchConfiguration.class).detectConfiguration(StreamsConfigurator.getConfig()));
+  }
+
+  public HdfsElasticsearch(HdfsElasticsearchConfiguration reindex) {
+    this.config = reindex;
+  }
+
+  public static void main(String[] args)
+  {
+    LOGGER.info(StreamsConfigurator.getConfig().toString());
+    HdfsElasticsearch restore = new HdfsElasticsearch();
+    new Thread(restore).start();
+  }
+
+  @Override
+  public void run() {
+
+    WebHdfsPersistReader webHdfsPersistReader = new WebHdfsPersistReader(config.getSource());
+    ElasticsearchPersistWriter elasticsearchPersistWriter = new ElasticsearchPersistWriter(config.getDestination());
+
+    LocalRuntimeConfiguration localRuntimeConfiguration =
+        StreamsJacksonMapper.getInstance().convertValue(StreamsConfigurator.detectConfiguration(), LocalRuntimeConfiguration.class);
+    StreamBuilder builder = new LocalStreamBuilder(localRuntimeConfiguration);
+
+    builder.newPerpetualStream(WebHdfsPersistReader.class.getCanonicalName(), webHdfsPersistReader);
+    builder.addStreamsPersistWriter(ElasticsearchPersistWriter.class.getCanonicalName(), elasticsearchPersistWriter, 1, WebHdfsPersistReader.class.getCanonicalName());
+    builder.start();
+  }
+}
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/src/main/jsonschema/ElasticsearchHdfsConfiguration.json b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/main/jsonschema/ElasticsearchHdfsConfiguration.json
new file mode 100644
index 0000000..ee17a3d
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/main/jsonschema/ElasticsearchHdfsConfiguration.json
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/draft-03/schema",
+  "$license": [
+    "http://www.apache.org/licenses/LICENSE-2.0"
+  ],
+  "type": "object",
+  "javaType" : "org.apache.streams.example.ElasticsearchHdfsConfiguration",
+  "javaInterfaces": ["java.io.Serializable"],
+  "properties": {
+    "source": { "javaType": "org.apache.streams.elasticsearch.ElasticsearchReaderConfiguration", "type": "object", "required": true },
+    "destination": { "javaType": "org.apache.streams.hdfs.HdfsWriterConfiguration", "type": "object", "required": true }
+  }
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/src/main/jsonschema/HdfsElasticsearchConfiguration.json b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/main/jsonschema/HdfsElasticsearchConfiguration.json
new file mode 100644
index 0000000..4239279
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/main/jsonschema/HdfsElasticsearchConfiguration.json
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/draft-03/schema",
+  "$license": [
+    "http://www.apache.org/licenses/LICENSE-2.0"
+  ],
+  "type": "object",
+  "javaType" : "org.apache.streams.example.HdfsElasticsearchConfiguration",
+  "javaInterfaces": ["java.io.Serializable"],
+  "properties": {
+    "source": { "javaType": "org.apache.streams.hdfs.HdfsReaderConfiguration", "type": "object", "required": true },
+    "destination": { "javaType": "org.apache.streams.elasticsearch.ElasticsearchWriterConfiguration", "type": "object", "required": true }
+  }
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/src/main/resources/ElasticsearchHdfs.dot b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/main/resources/ElasticsearchHdfs.dot
new file mode 100644
index 0000000..3c261e5
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/main/resources/ElasticsearchHdfs.dot
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+ digraph g {
+
+  //providers
+  ElasticsearchPersistReader [label="ElasticsearchPersistReader",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistReader.java"];
+
+  //persisters
+  WebHdfsPersistWriter [label="WebHdfsPersistWriter",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistWriter.java"];
+
+  //data
+  source [label="es://{indexes}/{types}",shape=box];
+  destination [label="hdfs://{index}/{type}",shape=box];
+
+  //stream
+  source -> ElasticsearchPersistReader
+  ElasticsearchPersistReader -> WebHdfsPersistWriter [label="String"];
+  WebHdfsPersistWriter -> destination
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/src/main/resources/HdfsElasticsearch.dot b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/main/resources/HdfsElasticsearch.dot
new file mode 100644
index 0000000..8219c30
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/main/resources/HdfsElasticsearch.dot
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+ digraph g {
+
+  //providers
+  WebHdfsPersistReader [label="WebHdfsPersistReader",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistReader.java"];
+
+  //persisters
+  ElasticsearchPersistWriter [label="ElasticsearchPersistWriter",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistWriter.java"];
+
+  //data
+  source [label="hdfs://{indexes}/{types}",shape=box];
+  destination [label="es://{index}/{type}",shape=box];
+
+  //stream
+  source -> WebHdfsPersistReader
+  WebHdfsPersistReader -> ElasticsearchPersistWriter [label="String"];
+  ElasticsearchPersistWriter -> destination
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/src/site/markdown/ElasticsearchHdfs.md b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/site/markdown/ElasticsearchHdfs.md
new file mode 100644
index 0000000..3294e09
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/site/markdown/ElasticsearchHdfs.md
@@ -0,0 +1,49 @@
+### ElasticsearchHdfs
+
+#### Description:
+
+Copies documents from elasticsearch to hdfs.
+
+#### Configuration:
+
+[ElasticsearchHdfs.json](ElasticsearchHdfs.json "ElasticsearchHdfs.json" )
+
+##### application.conf
+
+    include "elasticsearch.properties"
+    include "elasticsearch.conf"
+    source = ${elasticsearch}
+    source {
+      indexes += "elasticsearch_persist_writer_it"
+      types += "activity"
+    }
+    destination {
+      fields = ["ID","DOC"]
+      scheme = file
+      user = hadoop
+      path = "target/test-classes"
+      writerPath = "elasticsearch_hdfs_it"
+    }
+        
+#### Run (SBT):
+
+    sbtx -210 -sbt-create
+    set resolvers += "Local Maven Repository" at "file://"+Path.userHome.absolutePath+"/.m2/repository"
+    set libraryDependencies += "org.apache.streams" % "elasticsearch-hdfs" % "0.4-incubating-SNAPSHOT"
+    set fork := true
+    set javaOptions +="-Dconfig.file=application.conf"
+    run org.apache.streams.example.ElasticsearchHdfs
+
+#### Run (Docker):
+
+    docker run apachestreams/elasticsearch-hdfs java -cp elasticsearch-hdfs-jar-with-dependencies.jar -Dconfig.url=http://<location_of_config_file> org.apache.streams.example.ElasticsearchHdfs
+
+#### Specification:
+
+[ElasticsearchHdfs.dot](ElasticsearchHdfs.dot "ElasticsearchHdfs.dot" )
+
+#### Diagram:
+
+![ElasticsearchHdfs.dot.svg](./ElasticsearchHdfs.dot.svg)
+
+###### Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/src/site/markdown/HdfsElasticsearch.md b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/site/markdown/HdfsElasticsearch.md
new file mode 100644
index 0000000..ecd8445
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/site/markdown/HdfsElasticsearch.md
@@ -0,0 +1,51 @@
+### HdfsElasticsearch
+
+#### Description:
+
+Copies documents from hdfs to elasticsearch.
+
+#### Configuration:
+
+[HdfsElasticsearch.json](HdfsElasticsearch.json "HdfsElasticsearch.json" )
+
+##### application.conf
+
+    include "elasticsearch.properties"
+    include "elasticsearch.conf"
+    source {
+      fields = ["ID","DOC"]
+      scheme = file
+      user = hadoop
+      path = "target/test-classes"
+      readerPath = "elasticsearch_hdfs_it"
+    }
+    destination = ${elasticsearch}
+    destination {
+      index = "hdfs_elasticsearch_it"
+      type = "activity"
+      refresh = true
+      forceUseConfig = true
+    }
+    
+#### Run (SBT):
+
+    sbtx -210 -sbt-create
+    set resolvers += "Local Maven Repository" at "file://"+Path.userHome.absolutePath+"/.m2/repository"
+    set libraryDependencies += "org.apache.streams" % "elasticsearch-hdfs" % "0.4-incubating-SNAPSHOT"
+    set fork := true
+    set javaOptions +="-Dconfig.file=application.conf"
+    run org.apache.streams.example.HdfsElasticsearch
+
+#### Run (Docker):
+
+    docker run elasticsearch-hdfs java -cp elasticsearch-hdfs-jar-with-dependencies.jar -Dconfig.file=`pwd`/HdfsElasticsearchIT.conf org.apache.streams.example.HdfsElasticsearch
+
+#### Specification:
+
+[HdfsElasticsearch.dot](HdfsElasticsearch.dot "HdfsElasticsearch.dot" )
+
+#### Diagram:
+
+![HdfsElasticsearch.dot.svg](./HdfsElasticsearch.dot.svg)
+
+###### Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/src/site/markdown/index.md b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/site/markdown/index.md
new file mode 100644
index 0000000..4be1820
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/site/markdown/index.md
@@ -0,0 +1,32 @@
+### elasticsearch-hdfs
+
+#### Requirements:
+ - A running ElasticSearch 2.0.0+ instance
+
+#### Streams:
+
+<a href="HdfsElasticsearch.html" target="_self">HdfsElasticsearch</a>
+
+<a href="ElasticsearchHdfs.html" target="_self">ElasticsearchHdfs</a>
+
+#### Build:
+
+    mvn clean install
+
+#### Test:
+
+Start up elasticsearch with docker:
+     
+    mvn -PdockerITs docker:start
+ 
+Build with integration testing enabled:
+ 
+    mvn clean test verify -DskipITs=false
+ 
+Shutdown elasticsearch when finished:
+ 
+    mvn -PdockerITs docker:stop
+
+[JavaDocs](apidocs/index.html "JavaDocs")
+
+###### Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/src/site/site.xml b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/site/site.xml
new file mode 100644
index 0000000..5fb3b4d
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/site/site.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+<project>
+    <body>
+        <menu name="Services">
+            <item name="Elasticsearch" href="../../../streams-project/services/elasticsearch.html"/>
+        </menu>
+    </body>
+</project>
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/java/org/apache/streams/example/test/ElasticsearchHdfsIT.java b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/java/org/apache/streams/example/test/ElasticsearchHdfsIT.java
new file mode 100644
index 0000000..9b70440
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/java/org/apache/streams/example/test/ElasticsearchHdfsIT.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.example.test;
+
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.elasticsearch.ElasticsearchClientManager;
+import org.apache.streams.example.ElasticsearchHdfs;
+import org.apache.streams.example.ElasticsearchHdfsConfiguration;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
+import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
+import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
+import org.elasticsearch.action.search.SearchRequestBuilder;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.client.Requests;
+import org.elasticsearch.cluster.health.ClusterHealthStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.core.Is.is;
+import static org.testng.Assert.assertNotEquals;
+
+/**
+ * ElasticsearchHdfsIT is an integration test for ElasticsearchHdfs.
+ */
+public class ElasticsearchHdfsIT {
+
+  private final static Logger LOGGER = LoggerFactory.getLogger(ElasticsearchHdfsIT.class);
+
+  ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+
+  protected ElasticsearchHdfsConfiguration testConfiguration;
+  protected Client testClient;
+
+  private int count = 0;
+
+  @BeforeClass
+  public void prepareTest() throws Exception {
+
+    Config reference  = ConfigFactory.load();
+    File conf_file = new File("target/test-classes/ElasticsearchHdfsIT.conf");
+    assert(conf_file.exists());
+    Config testResourceConfig  = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+    testConfiguration = new ComponentConfigurator<>(ElasticsearchHdfsConfiguration.class).detectConfiguration(typesafe);
+    testClient = ElasticsearchClientManager.getInstance(testConfiguration.getSource()).client();
+
+    ClusterHealthRequest clusterHealthRequest = Requests.clusterHealthRequest();
+    ClusterHealthResponse clusterHealthResponse = testClient.admin().cluster().health(clusterHealthRequest).actionGet();
+    assertNotEquals(clusterHealthResponse.getStatus(), ClusterHealthStatus.RED);
+
+    IndicesExistsRequest indicesExistsRequest = Requests.indicesExistsRequest(testConfiguration.getSource().getIndexes().get(0));
+    IndicesExistsResponse indicesExistsResponse = testClient.admin().indices().exists(indicesExistsRequest).actionGet();
+    assertThat(indicesExistsResponse.isExists(), is(true));
+
+    SearchRequestBuilder countRequest = testClient
+        .prepareSearch(testConfiguration.getSource().getIndexes().get(0))
+        .setTypes(testConfiguration.getSource().getTypes().get(0));
+    SearchResponse countResponse = countRequest.execute().actionGet();
+
+    count = (int)countResponse.getHits().getTotalHits();
+
+    assertNotEquals(count, 0);
+  }
+
+  @Test
+  public void ElasticsearchHdfsIT() throws Exception {
+
+    ElasticsearchHdfs backup = new ElasticsearchHdfs(testConfiguration);
+    backup.run();
+
+  }
+
+}
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/java/org/apache/streams/example/test/HdfsElasticsearchIT.java b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/java/org/apache/streams/example/test/HdfsElasticsearchIT.java
new file mode 100644
index 0000000..d5f6a29
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/java/org/apache/streams/example/test/HdfsElasticsearchIT.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.example.test;
+
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.elasticsearch.ElasticsearchClientManager;
+import org.apache.streams.example.HdfsElasticsearch;
+import org.apache.streams.example.HdfsElasticsearchConfiguration;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
+import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
+import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
+import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
+import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
+import org.elasticsearch.action.search.SearchRequestBuilder;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.client.Requests;
+import org.elasticsearch.cluster.health.ClusterHealthStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertNotEquals;
+import static org.testng.AssertJUnit.assertTrue;
+
+/**
+ * HdfsElasticsearchIT is an integration test for HdfsElasticsearch.
+ */
+public class HdfsElasticsearchIT {
+
+  private final static Logger LOGGER = LoggerFactory.getLogger(HdfsElasticsearchIT.class);
+
+  ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+
+  protected HdfsElasticsearchConfiguration testConfiguration;
+  protected Client testClient;
+
+  @BeforeClass
+  public void prepareTest() throws Exception {
+
+    Config reference  = ConfigFactory.load();
+    File conf_file = new File("target/test-classes/HdfsElasticsearchIT.conf");
+    assert(conf_file.exists());
+    Config testResourceConfig  = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+    testConfiguration = new ComponentConfigurator<>(HdfsElasticsearchConfiguration.class).detectConfiguration(typesafe);
+    testClient = ElasticsearchClientManager.getInstance(testConfiguration.getDestination()).client();
+
+    ClusterHealthRequest clusterHealthRequest = Requests.clusterHealthRequest();
+    ClusterHealthResponse clusterHealthResponse = testClient.admin().cluster().health(clusterHealthRequest).actionGet();
+    assertNotEquals(clusterHealthResponse.getStatus(), ClusterHealthStatus.RED);
+
+    IndicesExistsRequest indicesExistsRequest = Requests.indicesExistsRequest(testConfiguration.getDestination().getIndex());
+    IndicesExistsResponse indicesExistsResponse = testClient.admin().indices().exists(indicesExistsRequest).actionGet();
+    if(indicesExistsResponse.isExists()) {
+      DeleteIndexRequest deleteIndexRequest = Requests.deleteIndexRequest(testConfiguration.getDestination().getIndex());
+      DeleteIndexResponse deleteIndexResponse = testClient.admin().indices().delete(deleteIndexRequest).actionGet();
+      assertTrue(deleteIndexResponse.isAcknowledged());
+    };
+  }
+
+  @Test
+  public void ElasticsearchHdfsIT() throws Exception {
+
+    HdfsElasticsearch restore = new HdfsElasticsearch(testConfiguration);
+
+    restore.run();
+
+    IndicesExistsRequest indicesExistsRequest = Requests.indicesExistsRequest(testConfiguration.getDestination().getIndex());
+    IndicesExistsResponse indicesExistsResponse = testClient.admin().indices().exists(indicesExistsRequest).actionGet();
+    assertTrue(indicesExistsResponse.isExists());
+
+    SearchRequestBuilder countRequest = testClient
+        .prepareSearch(testConfiguration.getDestination().getIndex())
+        .setTypes(testConfiguration.getDestination().getType());
+    SearchResponse countResponse = countRequest.execute().actionGet();
+
+    assertEquals(countResponse.getHits().getTotalHits(), 89);
+
+  }
+
+}
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/ElasticsearchHdfsIT.conf b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/ElasticsearchHdfsIT.conf
new file mode 100644
index 0000000..5ec7b00
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/ElasticsearchHdfsIT.conf
@@ -0,0 +1,31 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+include "../../../../../elasticsearch.properties"
+source {
+  hosts += ${es.tcp.host}
+  port = ${es.tcp.port}
+  clusterName = "elasticsearch"
+  indexes += "elasticsearch_persist_writer_it"
+  types += "activity"
+}
+destination {
+  fields = ["ID","DOC"]
+  scheme = file
+  user = hadoop
+  path = "target/test-classes"
+  writerPath = "elasticsearch_hdfs_it"
+}
+taskTimeoutMs = 60000
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/HdfsElasticsearchIT.conf b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/HdfsElasticsearchIT.conf
new file mode 100644
index 0000000..3eb0b0f
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/HdfsElasticsearchIT.conf
@@ -0,0 +1,33 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+include "../../../../../elasticsearch.properties"
+source {
+  fields = ["ID","DOC"]
+  scheme = file
+  user = hadoop
+  path = "target/test-classes"
+  readerPath = "elasticsearch_hdfs_it"
+}
+destination {
+  hosts += ${es.tcp.host}
+  port = ${es.tcp.port}
+  clusterName = "elasticsearch"
+  index = "hdfs_elasticsearch_it"
+  type = "activity"
+  refresh = true
+  forceUseConfig = true
+}
+taskTimeoutMs = 60000
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/log4j.properties b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/log4j.properties
new file mode 100644
index 0000000..7d946e4
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/log4j.properties
@@ -0,0 +1,26 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Root logger option
+log4j.rootLogger=DEBUG, stdout
+
+# Direct log messages to stdout
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/logback.xml b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/logback.xml
new file mode 100644
index 0000000..cdce0c0
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/logback.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<configuration debug="true" scan="true" scanPeriod="5 seconds">
+
+    <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
+        <append>true</append>
+        <!-- encoders are assigned the type
+             ch.qos.logback.classic.encoder.PatternLayoutEncoder by default -->
+        <encoder>
+            <pattern>%-4relative [%thread] %-5level %logger{35} - %msg%n</pattern>
+        </encoder>
+    </appender>
+
+    <appender name="FILE" class="ch.qos.logback.core.FileAppender">
+        <file>target/logback.txt</file>
+        <append>true</append>
+        <!-- encoders are assigned the type
+             ch.qos.logback.classic.encoder.PatternLayoutEncoder by default -->
+        <encoder>
+            <pattern>%-4relative [%thread] %-5level %logger{35} - %msg%n</pattern>
+        </encoder>
+    </appender>
+
+    <root level="DEBUG">
+        <appender-ref ref="CONSOLE" />
+        <appender-ref ref="FILE" />
+    </root>
+
+</configuration>
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/testBackup.json b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/testBackup.json
new file mode 100644
index 0000000..06268e4
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/testBackup.json
@@ -0,0 +1,26 @@
+{
+  "$license": [
+    "http://www.apache.org/licenses/LICENSE-2.0"
+  ],
+  "source": {
+      "hosts": [
+          "localhost"
+      ],
+      "port": 9300,
+      "clusterName": "elasticsearch",
+      "indexes": [
+          "source"
+      ],
+      "types": [
+          "activity"
+      ]
+  },
+  "destination": {
+    "scheme": "file",
+    "host": "localhost",
+    "user": "cloudera",
+    "path": "target",
+    "writerPath": "test",
+    "writerFilePrefix": "activities"
+  }
+}
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/testRestore.json b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/testRestore.json
new file mode 100644
index 0000000..749ec25
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/testRestore.json
@@ -0,0 +1,22 @@
+{
+  "$license": [
+    "http://www.apache.org/licenses/LICENSE-2.0"
+  ],
+  "source": {
+    "scheme": "file",
+    "host": "localhost",
+    "user": "cloudera",
+    "path": "target",
+    "readerPath": "test"
+  },
+  "destination": {
+      "hosts": [
+          "localhost"
+      ],
+      "port": 9300,
+      "clusterName": "elasticsearch",
+      "index": "destination",
+      "type": "activity",
+      "forceUseConfig": true
+  }
+}
diff --git a/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/testng.xml b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/testng.xml
new file mode 100644
index 0000000..c52395f
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-hdfs/src/test/resources/testng.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE suite SYSTEM "http://testng.org/testng-1.0.dtd" >
+
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<suite name="ExampleITs">
+
+    <test name="ElasticsearchHdfsIT">
+        <classes>
+            <class name="org.apache.streams.example.test.ElasticsearchHdfsIT" />
+        </classes>
+    </test>
+
+    <test name="HdfsElasticsearchIT">
+        <classes>
+            <class name="org.apache.streams.example.test.HdfsElasticsearchIT" />
+        </classes>
+    </test>
+
+</suite>  
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/elasticsearch-reindex/README.md b/streams-examples/streams-examples-local/elasticsearch-reindex/README.md
new file mode 100644
index 0000000..b6653da
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-reindex/README.md
@@ -0,0 +1,8 @@
+Apache Streams (incubating)
+Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
+--------------------------------------------------------------------------------
+
+org.apache.streams:elasticsearch-reindex
+========================================
+
+[README.md](src/site/markdown/index.md "README")
diff --git a/streams-examples/streams-examples-local/elasticsearch-reindex/pom.xml b/streams-examples/streams-examples-local/elasticsearch-reindex/pom.xml
new file mode 100644
index 0000000..22801a9
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-reindex/pom.xml
@@ -0,0 +1,278 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <groupId>org.apache.streams.examples</groupId>
+        <artifactId>streams-examples-local</artifactId>
+        <version>0.5.2-SNAPSHOT</version>
+        <relativePath>..</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>streams-elasticsearch-reindex</artifactId>
+    <name>elasticsearch-reindex</name>
+
+    <description>Copies documents between indexes.</description>
+
+    <properties>
+        <docker.repo>apachestreams</docker.repo>
+        <elasticsearch.version>2.4.6</elasticsearch.version>
+        <lucene.version>5.5.4</lucene.version>
+    </properties>
+
+    <dependencies>
+    <!-- Test includes -->
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-test-framework</artifactId>
+            <version>${lucene.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-codecs</artifactId>
+            <version>${lucene.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.elasticsearch</groupId>
+            <artifactId>elasticsearch</artifactId>
+            <version>${elasticsearch.version}</version>
+            <type>test-jar</type>
+        </dependency>
+        <dependency>
+            <groupId>org.testng</groupId>
+            <artifactId>testng</artifactId>
+            <version>${testng.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-all</artifactId>
+            <version>1.3</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.typesafe</groupId>
+            <artifactId>config</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-core</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-config</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-util</artifactId>
+            <type>test-jar</type>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-schema-activitystreams</artifactId>
+            <type>test-jar</type>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-runtime-local</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-persist-elasticsearch</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-persist-elasticsearch</artifactId>
+            <type>test-jar</type>
+            <scope>test</scope>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>log4j-over-slf4j</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>jcl-over-slf4j</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>jul-to-slf4j</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-classic</artifactId>
+            <version>${logback.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-core</artifactId>
+            <version>${logback.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-testing</artifactId>
+            <version>${project.version}</version>
+            <scope>test</scope>
+            <type>test-jar</type>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <sourceDirectory>src/main/java</sourceDirectory>
+        <testSourceDirectory>src/test/java</testSourceDirectory>
+        <resources>
+            <resource>
+                <directory>src/main/resources</directory>
+            </resource>
+        </resources>
+        <testResources>
+            <testResource>
+                <directory>src/test/resources</directory>
+            </testResource>
+        </testResources>
+        <plugins>
+            <!-- This binary runs with logback -->
+            <!-- Keep log4j out -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-enforcer-plugin</artifactId>
+                <version>1.3.1</version>
+                <executions>
+                    <execution>
+                        <id>enforce-banned-dependencies</id>
+                        <goals>
+                            <goal>enforce</goal>
+                        </goals>
+                        <configuration>
+                            <rules>
+                                <bannedDependencies>
+                                    <excludes>
+                                        <exclude>org.slf4j:slf4j-log4j12</exclude>
+                                        <exclude>org.slf4j:slf4j-jcl</exclude>
+                                        <exclude>org.slf4j:slf4j-jdk14</exclude>
+                                        <exclude>org.log4j:log4j</exclude>
+                                        <exclude>commons-logging:commons-logging</exclude>
+                                    </excludes>
+                                </bannedDependencies>
+                            </rules>
+                            <fail>true</fail>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.streams.plugins</groupId>
+                <artifactId>streams-plugin-pojo</artifactId>
+                <configuration>
+                    <sourcePaths>
+                        <sourcePath>${project.basedir}/src/main/jsonschema</sourcePath>
+                    </sourcePaths>
+                    <targetDirectory>${project.basedir}/target/generated-sources/pojo</targetDirectory>
+                    <targetPackage>org.apache.streams.example.elasticsearch</targetPackage>
+                </configuration>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>generate-sources</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.apache.streams</groupId>
+                        <artifactId>streams-persist-elasticsearch</artifactId>
+                        <version>${project.version}</version>
+                    </dependency>
+                </dependencies>
+            </plugin>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>build-helper-maven-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>add-source</id>
+                        <phase>generate-sources</phase>
+                        <goals>
+                            <goal>add-source</goal>
+                        </goals>
+                        <configuration>
+                            <sources>
+                                <source>target/generated-sources/pojo</source>
+                            </sources>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-dependency-plugin</artifactId>
+                <configuration>
+                    <includes>**/*.json,**/*.conf</includes>
+                    <outputDirectory>${project.build.directory}/test-classes</outputDirectory>
+                    <includeGroupIds>org.apache.streams</includeGroupIds>
+                    <includeTypes>test-jar</includeTypes>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>test-resource-dependencies</id>
+                        <phase>process-test-resources</phase>
+                        <goals>
+                            <goal>unpack-dependencies</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-failsafe-plugin</artifactId>
+                <configuration>
+                    <!-- Run integration test suite rather than individual tests. -->
+                    <suiteXmlFiles>
+                        <suiteXmlFile>target/test-classes/testng.xml</suiteXmlFile>
+                    </suiteXmlFiles>
+                </configuration>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.apache.maven.surefire</groupId>
+                        <artifactId>surefire-testng</artifactId>
+                        <version>${failsafe.plugin.version}</version>
+                    </dependency>
+                </dependencies>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>
diff --git a/streams-examples/streams-examples-local/elasticsearch-reindex/src/main/java/org/apache/streams/example/ElasticsearchReindex.java b/streams-examples/streams-examples-local/elasticsearch-reindex/src/main/java/org/apache/streams/example/ElasticsearchReindex.java
new file mode 100644
index 0000000..a1ac353
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-reindex/src/main/java/org/apache/streams/example/ElasticsearchReindex.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.example;
+
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.core.StreamBuilder;
+import org.apache.streams.elasticsearch.ElasticsearchPersistReader;
+import org.apache.streams.elasticsearch.ElasticsearchPersistWriter;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.local.LocalRuntimeConfiguration;
+import org.apache.streams.local.builders.LocalStreamBuilder;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Copies documents from the source index to the destination index.
+ */
+public class ElasticsearchReindex implements Runnable {
+
+  public final static String STREAMS_ID = "ElasticsearchReindex";
+
+  private final static Logger LOGGER = LoggerFactory.getLogger(ElasticsearchReindex.class);
+
+  ElasticsearchReindexConfiguration config;
+
+  public ElasticsearchReindex() {
+    this(new ComponentConfigurator<>(ElasticsearchReindexConfiguration.class).detectConfiguration(StreamsConfigurator.getConfig()));
+
+  }
+
+  public ElasticsearchReindex(ElasticsearchReindexConfiguration reindex) {
+    this.config = reindex;
+  }
+
+  public static void main(String[] args)
+  {
+    LOGGER.info(StreamsConfigurator.getConfig().toString());
+
+    ElasticsearchReindex reindex = new ElasticsearchReindex();
+
+    new Thread(reindex).start();
+
+  }
+
+  @Override
+  public void run() {
+
+    ElasticsearchPersistReader elasticsearchPersistReader = new ElasticsearchPersistReader(config.getSource());
+
+    ElasticsearchPersistWriter elasticsearchPersistWriter = new ElasticsearchPersistWriter(config.getDestination());
+
+    LocalRuntimeConfiguration localRuntimeConfiguration =
+        StreamsJacksonMapper.getInstance().convertValue(StreamsConfigurator.detectConfiguration(), LocalRuntimeConfiguration.class);
+    StreamBuilder builder = new LocalStreamBuilder(localRuntimeConfiguration);
+
+    builder.newPerpetualStream(ElasticsearchPersistReader.class.getCanonicalName(), elasticsearchPersistReader);
+    builder.addStreamsPersistWriter(ElasticsearchPersistWriter.class.getCanonicalName(), elasticsearchPersistWriter, 1, ElasticsearchPersistReader.class.getCanonicalName());
+    builder.start();
+  }
+}
diff --git a/streams-examples/streams-examples-local/elasticsearch-reindex/src/main/jsonschema/ElasticsearchReindexConfiguration.json b/streams-examples/streams-examples-local/elasticsearch-reindex/src/main/jsonschema/ElasticsearchReindexConfiguration.json
new file mode 100644
index 0000000..09bdf5b
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-reindex/src/main/jsonschema/ElasticsearchReindexConfiguration.json
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/draft-03/schema",
+  "$license": [
+    "http://www.apache.org/licenses/LICENSE-2.0"
+  ],
+  "type": "object",
+  "javaType" : "org.apache.streams.example.ElasticsearchReindexConfiguration",
+  "javaInterfaces": ["java.io.Serializable"],
+  "properties": {
+    "source": { "javaType": "org.apache.streams.elasticsearch.ElasticsearchReaderConfiguration", "type": "object", "required": true },
+    "destination": { "javaType": "org.apache.streams.elasticsearch.ElasticsearchWriterConfiguration", "type": "object", "required": true }
+  }
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/elasticsearch-reindex/src/main/resources/ElasticsearchReindex.dot b/streams-examples/streams-examples-local/elasticsearch-reindex/src/main/resources/ElasticsearchReindex.dot
new file mode 100644
index 0000000..69ad1f6
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-reindex/src/main/resources/ElasticsearchReindex.dot
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+ digraph g {
+
+  //providers
+  ElasticsearchPersistReader [label="ElasticsearchPersistReader",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-contrib/streams-provider-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistReader.java"];
+
+  //persisters
+  ElasticsearchPersistWriter [label="ElasticsearchPersistWriter",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistWriter.java"];
+
+  //data
+  source [label="es://{indexes}/{types}",shape=box];
+  destination [label="es://{index}/{type}",shape=box];
+
+  //stream
+  source -> ElasticsearchPersistReader
+  ElasticsearchPersistReader -> ElasticsearchPersistWriter [label="String"];
+  ElasticsearchPersistWriter -> destination
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/elasticsearch-reindex/src/main/resources/application.json b/streams-examples/streams-examples-local/elasticsearch-reindex/src/main/resources/application.json
new file mode 100644
index 0000000..ac5259a
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-reindex/src/main/resources/application.json
@@ -0,0 +1,29 @@
+{
+    "$license": [
+        "http://www.apache.org/licenses/LICENSE-2.0"
+    ],
+    "source": {
+        "hosts": [
+            "localhost"
+        ],
+        "port": 9300,
+        "clusterName": "elasticsearch",
+        "indexes": [
+            "activity"
+        ],
+        "types": [
+            "activity"
+        ],
+        "forceUseConfig": true
+    },
+    "destination": {
+        "hosts": [
+            "localhost"
+        ],
+        "port": 9300,
+        "clusterName": "elasticsearch",
+        "index": "activity2",
+        "type": "activity",
+        "forceUseConfig": true
+    }
+}
diff --git a/streams-examples/streams-examples-local/elasticsearch-reindex/src/main/resources/log4j.properties b/streams-examples/streams-examples-local/elasticsearch-reindex/src/main/resources/log4j.properties
new file mode 100644
index 0000000..7d946e4
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-reindex/src/main/resources/log4j.properties
@@ -0,0 +1,26 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Root logger option
+log4j.rootLogger=DEBUG, stdout
+
+# Direct log messages to stdout
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/elasticsearch-reindex/src/main/resources/logback.xml b/streams-examples/streams-examples-local/elasticsearch-reindex/src/main/resources/logback.xml
new file mode 100644
index 0000000..cdce0c0
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-reindex/src/main/resources/logback.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<configuration debug="true" scan="true" scanPeriod="5 seconds">
+
+    <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
+        <append>true</append>
+        <!-- encoders are assigned the type
+             ch.qos.logback.classic.encoder.PatternLayoutEncoder by default -->
+        <encoder>
+            <pattern>%-4relative [%thread] %-5level %logger{35} - %msg%n</pattern>
+        </encoder>
+    </appender>
+
+    <appender name="FILE" class="ch.qos.logback.core.FileAppender">
+        <file>target/logback.txt</file>
+        <append>true</append>
+        <!-- encoders are assigned the type
+             ch.qos.logback.classic.encoder.PatternLayoutEncoder by default -->
+        <encoder>
+            <pattern>%-4relative [%thread] %-5level %logger{35} - %msg%n</pattern>
+        </encoder>
+    </appender>
+
+    <root level="DEBUG">
+        <appender-ref ref="CONSOLE" />
+        <appender-ref ref="FILE" />
+    </root>
+
+</configuration>
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/elasticsearch-reindex/src/site/markdown/ElasticsearchReindex.md b/streams-examples/streams-examples-local/elasticsearch-reindex/src/site/markdown/ElasticsearchReindex.md
new file mode 100644
index 0000000..9bd37d4
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-reindex/src/site/markdown/ElasticsearchReindex.md
@@ -0,0 +1,49 @@
+### ElasticsearchReindex
+
+#### Description:
+
+Copies documents into a different index
+
+#### Configuration:
+
+[ElasticsearchReindex.json](ElasticsearchReindex.json "ElasticsearchReindex.json")
+
+##### application.conf
+
+    include "elasticsearch.properties"
+    include "elasticsearch.conf"
+    source = ${elasticsearch}
+    source {
+       indexes += "elasticsearch_persist_writer_it"
+       types += "activity"
+    }
+    destination = ${elasticsearch}
+    destination {
+       index: "elasticsearch_reindex_it",
+       type: "activity",
+       "forceUseConfig": true
+    }
+    
+#### Run (SBT):
+
+    sbtx -210 -sbt-create
+    set resolvers += "Local Maven Repository" at "file://"+Path.userHome.absolutePath+"/.m2/repository"
+    set libraryDependencies += "org.apache.streams" % "elasticsearch-reindex" % "0.4-incubating-SNAPSHOT"
+    set fork := true
+    set javaOptions +="-Dconfig.file=application.conf"
+    run org.apache.streams.example.ElasticsearchReindex
+
+#### Run (Docker):
+
+    docker run elasticsearch-reindex java -cp elasticsearch-reindex-jar-with-dependencies.jar -Dconfig.file=./application.conf org.apache.streams.example.ElasticsearchReindex
+
+#### Specification:
+
+[ElasticsearchReindex.dot](ElasticsearchReindex.dot "ElasticsearchReindex.dot" )
+
+#### Diagram:
+
+![ElasticsearchReindex.dot.svg](./ElasticsearchReindex.dot.svg)
+
+
+###### Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/elasticsearch-reindex/src/site/markdown/index.md b/streams-examples/streams-examples-local/elasticsearch-reindex/src/site/markdown/index.md
new file mode 100644
index 0000000..66e4a92
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-reindex/src/site/markdown/index.md
@@ -0,0 +1,32 @@
+### elasticsearch-reindex
+
+#### Requirements:
+ - A running Elasticsearch 2.0.0+ cluster
+ - Transport client access to cluster
+ - elasticsearch.version and lucene.version set to match cluster
+
+#### Streams:
+
+<a href="ElasticsearchReindex.html" target="_self">ElasticsearchReindex</a>
+
+#### Build:
+
+    mvn clean install
+
+#### Testing:
+
+Start up elasticsearch with docker:
+     
+    mvn -PdockerITs docker:start
+ 
+Build with integration testing enabled:
+ 
+    mvn clean test verify -DskipITs=false
+ 
+Shutdown elasticsearch when finished:
+ 
+    mvn -PdockerITs docker:stop
+    
+[JavaDocs](apidocs/index.html "JavaDocs")
+
+###### Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/elasticsearch-reindex/src/site/site.xml b/streams-examples/streams-examples-local/elasticsearch-reindex/src/site/site.xml
new file mode 100644
index 0000000..5fb3b4d
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-reindex/src/site/site.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+<project>
+    <body>
+        <menu name="Services">
+            <item name="Elasticsearch" href="../../../streams-project/services/elasticsearch.html"/>
+        </menu>
+    </body>
+</project>
diff --git a/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/java/org/apache/streams/example/test/ElasticsearchReindexChildIT.java b/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/java/org/apache/streams/example/test/ElasticsearchReindexChildIT.java
new file mode 100644
index 0000000..04d7cd6
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/java/org/apache/streams/example/test/ElasticsearchReindexChildIT.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.example.test;
+
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.elasticsearch.ElasticsearchClientManager;
+import org.apache.streams.example.ElasticsearchReindex;
+import org.apache.streams.example.ElasticsearchReindexConfiguration;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
+import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
+import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
+import org.elasticsearch.action.search.SearchRequestBuilder;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.client.Requests;
+import org.elasticsearch.cluster.health.ClusterHealthStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.core.Is.is;
+import static org.junit.Assert.assertNotEquals;
+
+/**
+ * Test copying parent/child associated documents between two indexes on same cluster.
+ */
+public class ElasticsearchReindexChildIT {
+
+  private final static Logger LOGGER = LoggerFactory.getLogger(ElasticsearchReindexChildIT.class);
+
+  ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+
+  protected ElasticsearchReindexConfiguration testConfiguration;
+  protected Client testClient;
+
+  private int count = 0;
+
+  @BeforeClass
+  public void prepareTest() throws Exception {
+
+    Config reference  = ConfigFactory.load();
+    File conf_file = new File("target/test-classes/ElasticsearchReindexChildIT.conf");
+    assert(conf_file.exists());
+    Config testResourceConfig  = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
+    Config typesafe = testResourceConfig.withFallback(reference).resolve();
+    testConfiguration = new ComponentConfigurator<>(ElasticsearchReindexConfiguration.class).detectConfiguration(typesafe);
+    testClient = ElasticsearchClientManager.getInstance(testConfiguration.getSource()).client();
+
+    ClusterHealthRequest clusterHealthRequest = Requests.clusterHealthRequest();
+    ClusterHealthResponse clusterHealthResponse = testClient.admin().cluster().health(clusterHealthRequest).actionGet();
+    assertNotEquals(clusterHealthResponse.getStatus(), ClusterHealthStatus.RED);
+
+    IndicesExistsRequest indicesExistsRequest = Requests.indicesExistsRequest(testConfiguration.getSource().getIndexes().get(0));
+    IndicesExistsResponse indicesExistsResponse = testClient.admin().indices().exists(indicesExistsRequest).actionGet();
+    assertThat(indicesExistsResponse.isExists(), is(true));
+
+    SearchRequestBuilder countRequest = testClient
+        .prepareSearch(testConfiguration.getSource().getIndexes().get(0))
+        .setTypes(testConfiguration.getSource().getTypes().get(0));
+    SearchResponse countResponse = countRequest.execute().actionGet();
+
+    count = (int)countResponse.getHits().getTotalHits();
+
+    assertNotEquals(count, 0);
+
+  }
+
+  @Test
+  public void testReindex() throws Exception {
+
+    ElasticsearchReindex reindex = new ElasticsearchReindex(testConfiguration);
+
+    reindex.run();
+
+    // assert the same number of documents exists in the destination index
+    SearchRequestBuilder countRequest = testClient
+        .prepareSearch(testConfiguration.getDestination().getIndex())
+        .setTypes(testConfiguration.getDestination().getType());
+    SearchResponse countResponse = countRequest.execute().actionGet();
+
+    assertThat((int)countResponse.getHits().getTotalHits(), is(count));
+
+  }
+
+}
diff --git a/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/java/org/apache/streams/example/test/ElasticsearchReindexIT.java b/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/java/org/apache/streams/example/test/ElasticsearchReindexIT.java
new file mode 100644
index 0000000..6c69388
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/java/org/apache/streams/example/test/ElasticsearchReindexIT.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.example.test;
+
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.elasticsearch.ElasticsearchClientManager;
+import org.apache.streams.example.ElasticsearchReindex;
+import org.apache.streams.example.ElasticsearchReindexConfiguration;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
+import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
+import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
+import org.elasticsearch.action.search.SearchRequestBuilder;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.client.Requests;
+import org.elasticsearch.cluster.health.ClusterHealthStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.core.Is.is;
+import static org.hamcrest.core.IsNot.not;
+
+/**
+ * Test copying documents between two indexes on same cluster
+ */
+public class ElasticsearchReindexIT {
+
+  private final static Logger LOGGER = LoggerFactory.getLogger(ElasticsearchReindexIT.class);
+
+  ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+
+  protected ElasticsearchReindexConfiguration testConfiguration;
+  protected Client testClient;
+
+  private int count = 0;
+
+  @BeforeClass
+  public void prepareTest() throws Exception {
+
+    Config reference  = ConfigFactory.load();
+    File conf_file = new File("target/test-classes/ElasticsearchReindexIT.conf");
+    assert(conf_file.exists());
+    Config testResourceConfig  = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+    testConfiguration = new ComponentConfigurator<>(ElasticsearchReindexConfiguration.class).detectConfiguration(typesafe);
+    testClient = ElasticsearchClientManager.getInstance(testConfiguration.getSource()).client();
+
+    ClusterHealthRequest clusterHealthRequest = Requests.clusterHealthRequest();
+    ClusterHealthResponse clusterHealthResponse = testClient.admin().cluster().health(clusterHealthRequest).actionGet();
+    assertThat(clusterHealthResponse.getStatus(), not(ClusterHealthStatus.RED));
+
+    IndicesExistsRequest indicesExistsRequest = Requests.indicesExistsRequest(testConfiguration.getSource().getIndexes().get(0));
+    IndicesExistsResponse indicesExistsResponse = testClient.admin().indices().exists(indicesExistsRequest).actionGet();
+    assertThat(indicesExistsResponse.isExists(), is(true));
+
+    SearchRequestBuilder countRequest = testClient
+        .prepareSearch(testConfiguration.getSource().getIndexes().get(0))
+        .setTypes(testConfiguration.getSource().getTypes().get(0));
+    SearchResponse countResponse = countRequest.execute().actionGet();
+
+    count = (int)countResponse.getHits().getTotalHits();
+
+    assertThat(count, not(0));
+
+  }
+
+  @Test
+  public void testReindex() throws Exception {
+
+    ElasticsearchReindex reindex = new ElasticsearchReindex(testConfiguration);
+
+    reindex.run();
+
+    // assert the same number of documents exists in the destination index
+    SearchRequestBuilder countRequest = testClient
+        .prepareSearch(testConfiguration.getDestination().getIndex())
+        .setTypes(testConfiguration.getDestination().getType());
+    SearchResponse countResponse = countRequest.execute().actionGet();
+
+    assertThat((int)countResponse.getHits().getTotalHits(), is(count));
+
+  }
+}
diff --git a/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/java/org/apache/streams/example/test/ElasticsearchReindexParentIT.java b/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/java/org/apache/streams/example/test/ElasticsearchReindexParentIT.java
new file mode 100644
index 0000000..e53c057
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/java/org/apache/streams/example/test/ElasticsearchReindexParentIT.java
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.example.test;
+
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.elasticsearch.ElasticsearchClientManager;
+import org.apache.streams.elasticsearch.test.ElasticsearchParentChildWriterIT;
+import org.apache.streams.example.ElasticsearchReindex;
+import org.apache.streams.example.ElasticsearchReindexConfiguration;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
+import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
+import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
+import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequestBuilder;
+import org.elasticsearch.action.search.SearchRequestBuilder;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.client.Requests;
+import org.elasticsearch.cluster.health.ClusterHealthStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.net.URL;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.core.Is.is;
+import static org.hamcrest.core.IsNot.not;
+import static org.testng.Assert.assertTrue;
+
+/**
+ * Test copying parent/child associated documents between two indexes on same cluster
+ */
+public class ElasticsearchReindexParentIT {
+
+  private final static Logger LOGGER = LoggerFactory.getLogger(ElasticsearchReindexParentIT.class);
+
+  ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+
+  protected ElasticsearchReindexConfiguration testConfiguration;
+  protected Client testClient;
+
+  private int count = 0;
+
+  @BeforeClass
+  public void prepareTest() throws Exception {
+
+    Config reference  = ConfigFactory.load();
+    File conf_file = new File("target/test-classes/ElasticsearchReindexParentIT.conf");
+    assert(conf_file.exists());
+    Config testResourceConfig  = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+    testConfiguration = new ComponentConfigurator<>(ElasticsearchReindexConfiguration.class).detectConfiguration(typesafe);
+    testClient = ElasticsearchClientManager.getInstance(testConfiguration.getSource()).client();
+
+    ClusterHealthRequest clusterHealthRequest = Requests.clusterHealthRequest();
+    ClusterHealthResponse clusterHealthResponse = testClient.admin().cluster().health(clusterHealthRequest).actionGet();
+    assertThat(clusterHealthResponse.getStatus(), not(ClusterHealthStatus.RED));
+
+    IndicesExistsRequest indicesExistsRequest = Requests.indicesExistsRequest(testConfiguration.getSource().getIndexes().get(0));
+    IndicesExistsResponse indicesExistsResponse = testClient.admin().indices().exists(indicesExistsRequest).actionGet();
+    assertTrue(indicesExistsResponse.isExists());
+
+    SearchRequestBuilder countRequest = testClient
+        .prepareSearch(testConfiguration.getSource().getIndexes().get(0))
+        .setTypes(testConfiguration.getSource().getTypes().get(0));
+    SearchResponse countResponse = countRequest.execute().actionGet();
+
+    count = (int)countResponse.getHits().getTotalHits();
+
+    PutIndexTemplateRequestBuilder putTemplateRequestBuilder = testClient.admin().indices().preparePutTemplate("mappings");
+    URL templateURL = ElasticsearchParentChildWriterIT.class.getResource("/ActivityChildObjectParent.json");
+    ObjectNode template = MAPPER.readValue(templateURL, ObjectNode.class);
+    String templateSource = MAPPER.writeValueAsString(template);
+    putTemplateRequestBuilder.setSource(templateSource);
+
+    testClient.admin().indices().putTemplate(putTemplateRequestBuilder.request()).actionGet();
+
+    assertThat(count, not(0));
+
+  }
+
+  @Test
+  public void testReindex() throws Exception {
+
+    ElasticsearchReindex reindex = new ElasticsearchReindex(testConfiguration);
+
+    reindex.run();
+
+    // assert the same number of documents exists in the destination index
+    SearchRequestBuilder countRequest = testClient
+        .prepareSearch(testConfiguration.getDestination().getIndex())
+        .setTypes(testConfiguration.getDestination().getType());
+    SearchResponse countResponse = countRequest.execute().actionGet();
+
+    assertThat((int)countResponse.getHits().getTotalHits(), is(count));
+
+  }
+
+}
diff --git a/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/resources/ElasticsearchReindexChildIT.conf b/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/resources/ElasticsearchReindexChildIT.conf
new file mode 100644
index 0000000..43dec59
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/resources/ElasticsearchReindexChildIT.conf
@@ -0,0 +1,37 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+include "../../../../../elasticsearch.properties"
+"source": {
+  hosts += ${es.tcp.host}
+  port = ${es.tcp.port}
+  "clusterName": "elasticsearch",
+  "indexes": [
+    "elasticsearch_persist_writer_parent_child_it"
+  ],
+  "types": [
+    "activity"
+  ]
+},
+"destination": {
+  hosts += ${es.tcp.host}
+  port = ${es.tcp.port}
+  "clusterName": "elasticsearch",
+  "index": "elasticsearch_reindex_parent_child_it",
+  "type": "activity",
+  "forceUseConfig": true
+}
+taskTimeoutMs = 60000
+
diff --git a/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/resources/ElasticsearchReindexIT.conf b/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/resources/ElasticsearchReindexIT.conf
new file mode 100644
index 0000000..ac6f85f
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/resources/ElasticsearchReindexIT.conf
@@ -0,0 +1,36 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+include "../../../../../elasticsearch.properties"
+"source": {
+  hosts += ${es.tcp.host}
+  port = ${es.tcp.port}
+  "clusterName": "elasticsearch",
+  "indexes": [
+    "elasticsearch_persist_writer_it"
+  ],
+  "types": [
+    "activity"
+  ]
+},
+"destination": {
+  hosts += ${es.tcp.host}
+  port = ${es.tcp.port}
+  "clusterName": "elasticsearch",
+  "index": "elasticsearch_reindex_it",
+  "type": "activity",
+  "forceUseConfig": true
+}
+taskTimeoutMs = 60000
diff --git a/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/resources/ElasticsearchReindexParentIT.conf b/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/resources/ElasticsearchReindexParentIT.conf
new file mode 100644
index 0000000..43dec59
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/resources/ElasticsearchReindexParentIT.conf
@@ -0,0 +1,37 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+include "../../../../../elasticsearch.properties"
+"source": {
+  hosts += ${es.tcp.host}
+  port = ${es.tcp.port}
+  "clusterName": "elasticsearch",
+  "indexes": [
+    "elasticsearch_persist_writer_parent_child_it"
+  ],
+  "types": [
+    "activity"
+  ]
+},
+"destination": {
+  hosts += ${es.tcp.host}
+  port = ${es.tcp.port}
+  "clusterName": "elasticsearch",
+  "index": "elasticsearch_reindex_parent_child_it",
+  "type": "activity",
+  "forceUseConfig": true
+}
+taskTimeoutMs = 60000
+
diff --git a/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/resources/testng.xml b/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/resources/testng.xml
new file mode 100644
index 0000000..8b421b6
--- /dev/null
+++ b/streams-examples/streams-examples-local/elasticsearch-reindex/src/test/resources/testng.xml
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE suite SYSTEM "http://testng.org/testng-1.0.dtd" >
+
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<suite name="ExampleITs">
+
+    <test name="ElasticsearchReindexIT">
+        <classes>
+            <class name="org.apache.streams.example.test.ElasticsearchReindexIT" />
+        </classes>
+    </test>
+
+    <test name="ElasticsearchReindexParentIT">
+        <classes>
+            <class name="org.apache.streams.example.test.ElasticsearchReindexParentIT" />
+        </classes>
+    </test>
+
+    <test name="ElasticsearchReindexChildIT">
+        <classes>
+            <class name="org.apache.streams.example.test.ElasticsearchReindexChildIT" />
+        </classes>
+    </test>
+
+</suite>
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/mongo-elasticsearch-sync/README.md b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/README.md
new file mode 100644
index 0000000..e74196c
--- /dev/null
+++ b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/README.md
@@ -0,0 +1,8 @@
+Apache Streams (incubating)
+Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
+--------------------------------------------------------------------------------
+
+org.apache.streams:mongo-elasticsearch-sync
+===========================================
+
+[README.md](src/site/markdown/index.md "README")
diff --git a/streams-examples/streams-examples-local/mongo-elasticsearch-sync/pom.xml b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/pom.xml
new file mode 100644
index 0000000..c578378
--- /dev/null
+++ b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/pom.xml
@@ -0,0 +1,282 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <groupId>org.apache.streams.examples</groupId>
+        <artifactId>streams-examples-local</artifactId>
+        <version>0.5.2-SNAPSHOT</version>
+        <relativePath>..</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>streams-mongo-elasticsearch-sync</artifactId>
+    <name>mongo-elasticsearch-sync</name>
+
+    <description>Copies documents from mongodb to elasticsearch.</description>
+
+    <properties>
+        <docker.repo>apachestreams</docker.repo>
+        <elasticsearch.version>2.4.6</elasticsearch.version>
+        <lucene.version>5.5.4</lucene.version>
+    </properties>
+
+    <dependencies>
+        <!-- Test includes -->
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-test-framework</artifactId>
+            <version>${lucene.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-codecs</artifactId>
+            <version>${lucene.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.elasticsearch</groupId>
+            <artifactId>elasticsearch</artifactId>
+            <version>${elasticsearch.version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.testng</groupId>
+            <artifactId>testng</artifactId>
+            <version>${testng.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-all</artifactId>
+            <version>1.3</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-core</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.typesafe</groupId>
+            <artifactId>config</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-config</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-runtime-local</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-persist-elasticsearch</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-persist-mongo</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-persist-mongo</artifactId>
+            <type>test-jar</type>
+            <scope>test</scope>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-schema-activitystreams</artifactId>
+            <type>test-jar</type>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>log4j-over-slf4j</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>jcl-over-slf4j</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>jul-to-slf4j</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-classic</artifactId>
+            <version>${logback.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-core</artifactId>
+            <version>${logback.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-testing</artifactId>
+            <version>${project.version}</version>
+            <scope>test</scope>
+            <type>test-jar</type>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <sourceDirectory>src/main/java</sourceDirectory>
+        <testSourceDirectory>src/test/java</testSourceDirectory>
+        <resources>
+            <resource>
+                <directory>src/main/resources</directory>
+            </resource>
+        </resources>
+        <testResources>
+            <testResource>
+                <directory>src/test/resources</directory>
+            </testResource>
+        </testResources>
+        <plugins>
+            <!-- This binary runs with logback -->
+            <!-- Keep log4j out -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-enforcer-plugin</artifactId>
+                <version>1.3.1</version>
+                <executions>
+                    <execution>
+                        <id>enforce-banned-dependencies</id>
+                        <goals>
+                            <goal>enforce</goal>
+                        </goals>
+                        <configuration>
+                            <rules>
+                                <bannedDependencies>
+                                    <excludes>
+                                        <exclude>org.slf4j:slf4j-log4j12</exclude>
+                                        <exclude>org.slf4j:slf4j-jcl</exclude>
+                                        <exclude>org.slf4j:slf4j-jdk14</exclude>
+                                        <exclude>org.log4j:log4j</exclude>
+                                        <exclude>commons-logging:commons-logging</exclude>
+                                    </excludes>
+                                </bannedDependencies>
+                            </rules>
+                            <fail>true</fail>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.streams.plugins</groupId>
+                <artifactId>streams-plugin-pojo</artifactId>
+                <configuration>
+                    <sourcePaths>
+                        <sourcePath>${project.basedir}/src/main/jsonschema</sourcePath>
+                    </sourcePaths>
+                    <targetDirectory>${project.basedir}/target/generated-sources/pojo</targetDirectory>
+                    <targetPackage>org.apache.streams.example.elasticsearch</targetPackage>
+                </configuration>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>generate-sources</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.apache.streams</groupId>
+                        <artifactId>streams-persist-elasticsearch</artifactId>
+                        <version>${project.version}</version>
+                    </dependency>
+                    <dependency>
+                        <groupId>org.apache.streams</groupId>
+                        <artifactId>streams-persist-mongo</artifactId>
+                        <version>${project.version}</version>
+                    </dependency>
+                </dependencies>
+            </plugin>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>build-helper-maven-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>add-source</id>
+                        <phase>generate-sources</phase>
+                        <goals>
+                            <goal>add-source</goal>
+                        </goals>
+                        <configuration>
+                            <sources>
+                                <source>target/generated-sources/pojo</source>
+                            </sources>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-dependency-plugin</artifactId>
+                <configuration>
+                    <includes>**/*.json,**/*.conf</includes>
+                    <outputDirectory>${project.build.directory}/test-classes</outputDirectory>
+                    <includeGroupIds>org.apache.streams</includeGroupIds>
+                    <includeTypes>test-jar</includeTypes>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>test-resource-dependencies</id>
+                        <phase>process-test-resources</phase>
+                        <goals>
+                            <goal>unpack-dependencies</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-failsafe-plugin</artifactId>
+                <configuration>
+                    <!-- Run integration test suite rather than individual tests. -->
+                    <suiteXmlFiles>
+                        <suiteXmlFile>target/test-classes/testng.xml</suiteXmlFile>
+                    </suiteXmlFiles>
+                </configuration>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.apache.maven.surefire</groupId>
+                        <artifactId>surefire-testng</artifactId>
+                        <version>${failsafe.plugin.version}</version>
+                    </dependency>
+                </dependencies>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>
diff --git a/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/main/java/org/apache/streams/example/MongoElasticsearchSync.java b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/main/java/org/apache/streams/example/MongoElasticsearchSync.java
new file mode 100644
index 0000000..58001d6
--- /dev/null
+++ b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/main/java/org/apache/streams/example/MongoElasticsearchSync.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.example;
+
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.core.StreamBuilder;
+import org.apache.streams.elasticsearch.ElasticsearchPersistWriter;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.local.LocalRuntimeConfiguration;
+import org.apache.streams.local.builders.LocalStreamBuilder;
+import org.apache.streams.mongo.MongoPersistReader;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Copies a mongo collection to an elasticsearch index.
+ *
+ * <p>Wires a {@code MongoPersistReader} (source) to an
+ * {@code ElasticsearchPersistWriter} (destination) through the local streams
+ * runtime. Source and destination settings come from a
+ * {@code MongoElasticsearchSyncConfiguration}.
+ */
+public class MongoElasticsearchSync implements Runnable {
+
+  // Identifier for this stream pipeline.
+  public final static String STREAMS_ID = "MongoElasticsearchSync";
+
+  private final static Logger LOGGER = LoggerFactory.getLogger(MongoElasticsearchSync.class);
+
+  // Holds the mongo source and elasticsearch destination settings.
+  MongoElasticsearchSyncConfiguration config;
+
+  /**
+   * Builds the sync from the configuration resolved by {@code StreamsConfigurator}.
+   */
+  public MongoElasticsearchSync() {
+    this(new ComponentConfigurator<MongoElasticsearchSyncConfiguration>(MongoElasticsearchSyncConfiguration.class).detectConfiguration(StreamsConfigurator.getConfig()));
+  }
+
+  /**
+   * Builds the sync from an explicit configuration (used by the integration test).
+   *
+   * @param config source/destination settings for the sync
+   */
+  public MongoElasticsearchSync(MongoElasticsearchSyncConfiguration config) {
+    this.config = config;
+  }
+
+  /**
+   * Entry point: logs the detected configuration and runs the sync on a new thread.
+   *
+   * @param args unused; configuration is taken from the typesafe config
+   */
+  public static void main(String[] args)
+  {
+    LOGGER.info(StreamsConfigurator.getConfig().toString());
+
+    MongoElasticsearchSync sync = new MongoElasticsearchSync();
+
+    new Thread(sync).start();
+
+  }
+
+  /**
+   * Assembles and starts the stream: mongo reader -> elasticsearch writer.
+   */
+  @Override
+  public void run() {
+
+    // Reads documents from the configured mongo db/collection.
+    MongoPersistReader mongoPersistReader = new MongoPersistReader(config.getSource());
+
+    // Writes each document to the configured elasticsearch index/type.
+    ElasticsearchPersistWriter elasticsearchPersistWriter = new ElasticsearchPersistWriter(config.getDestination());
+
+    // Runtime settings (parallelism, timeouts, ...) are re-detected from the
+    // typesafe config and mapped onto the local runtime's configuration bean.
+    LocalRuntimeConfiguration localRuntimeConfiguration =
+        StreamsJacksonMapper.getInstance().convertValue(StreamsConfigurator.detectConfiguration(), LocalRuntimeConfiguration.class);
+    StreamBuilder builder = new LocalStreamBuilder(localRuntimeConfiguration);
+
+    // Perpetual stream: the reader keeps providing until the runtime shuts it down
+    // (e.g. via the configured task timeout).
+    builder.newPerpetualStream(MongoPersistReader.class.getCanonicalName(), mongoPersistReader);
+    builder.addStreamsPersistWriter(ElasticsearchPersistWriter.class.getCanonicalName(), elasticsearchPersistWriter, 1, MongoPersistReader.class.getCanonicalName());
+    builder.start();
+  }
+}
diff --git a/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/main/jsonschema/MongoElasticsearchSyncConfiguration.json b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/main/jsonschema/MongoElasticsearchSyncConfiguration.json
new file mode 100644
index 0000000..0065468
--- /dev/null
+++ b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/main/jsonschema/MongoElasticsearchSyncConfiguration.json
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/draft-03/schema",
+  "$license": [
+    "http://www.apache.org/licenses/LICENSE-2.0"
+  ],
+  "type": "object",
+  "javaType" : "org.apache.streams.example.MongoElasticsearchSyncConfiguration",
+  "javaInterfaces": ["java.io.Serializable"],
+  "properties": {
+    "source": { "javaType": "org.apache.streams.mongo.MongoConfiguration", "type": "object", "required": true },
+    "destination": { "javaType": "org.apache.streams.elasticsearch.ElasticsearchWriterConfiguration", "type": "object", "required": true }
+  }
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/main/resources/MongoElasticsearchSync.dot b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/main/resources/MongoElasticsearchSync.dot
new file mode 100644
index 0000000..538af37
--- /dev/null
+++ b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/main/resources/MongoElasticsearchSync.dot
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+ digraph g {
+
+  //providers
+  MongoPersistReader [label="MongoPersistReader",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-contrib/streams-persist-mongo/src/main/java/org/apache/streams/mongo/MongoPersistReader.java"];
+
+  //persisters
+  ElasticsearchPersistWriter [label="ElasticsearchPersistWriter",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistWriter.java"];
+
+  //data
+  source [label="mongdb://{db}/{collection}",shape=box];
+  destination [label="es://{index}/{type}",shape=box];
+ 
+  //stream
+  source -> MongoPersistReader [label="ObjectNode"];
+  MongoPersistReader -> ElasticsearchPersistWriter [label="ObjectNode"];
+  ElasticsearchPersistWriter -> destination [label="ObjectNode"];
+
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/site/markdown/MongoElasticsearchSync.md b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/site/markdown/MongoElasticsearchSync.md
new file mode 100644
index 0000000..d1f8f8d
--- /dev/null
+++ b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/site/markdown/MongoElasticsearchSync.md
@@ -0,0 +1,50 @@
+### MongoElasticsearchSync
+
+#### Description:
+
+Copies documents from mongodb to elasticsearch
+
+#### Configuration:
+
+[MongoElasticsearchSync.json](MongoElasticsearchSync.json "MongoElasticsearchSync.json")
+
+##### application.conf
+
+    include "mongo.properties"
+    include "mongo.conf"
+    include "elasticsearch.properties"
+    include "elasticsearch.conf"
+    source = ${mongo}
+    source {
+      db: mongo_persist_it
+      collection: activity
+    }
+    destination = ${elasticsearch}
+    destination {
+      index: mongo_elasticsearch_sync_it
+      type: activity
+      forceUseConfig": true
+    }
+
+#### Run (SBT):
+
+    sbtx -210 -sbt-create
+    set resolvers += "Local Maven Repository" at "file://"+Path.userHome.absolutePath+"/.m2/repository"
+    set libraryDependencies += "org.apache.streams.examples" % "streams-mongo-elasticsearch-sync" % "0.5.2-SNAPSHOT"
+    set fork := true
+    set javaOptions +="-Dconfig.file=application.conf"
+    run org.apache.streams.example.MongoElasticsearchSync
+
+#### Run (Docker):
+
+    docker run apachestreams/mongo-elasticsearch-sync java -cp mongo-elasticsearch-sync-jar-with-dependencies.jar org.apache.streams.example.MongoElasticsearchSync
+
+#### Specification:
+
+[MongoElasticsearchSync.dot](MongoElasticsearchSync.dot "MongoElasticsearchSync.dot" )
+
+#### Diagram:
+
+![MongoElasticsearchSync.dot.svg](./MongoElasticsearchSync.dot.svg)
+
+###### Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/site/markdown/index.md b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/site/markdown/index.md
new file mode 100644
index 0000000..526375b
--- /dev/null
+++ b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/site/markdown/index.md
@@ -0,0 +1,31 @@
+### mongo-elasticsearch-sync
+
+#### Requirements:
+ - A running MongoDB 2.4+ instance
+ - A running ElasticSearch 2.0+ instance
+
+#### Streams:
+
+<a href="MongoElasticsearchSync.html" target="_self">MongoElasticsearchSync</a>
+
+#### Build:
+
+    mvn clean package
+
+#### Test:
+
+Start up elasticsearch and mongodb with docker:
+    
+    mvn -PdockerITs docker:start
+
+Build with integration testing enabled:
+
+    mvn clean test verify -DskipITs=false
+
+Shutdown elasticsearch and mongodb when finished:
+
+    mvn -PdockerITs docker:stop
+
+[JavaDocs](apidocs/index.html "JavaDocs")
+
+###### Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
diff --git a/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/site/site.xml b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/site/site.xml
new file mode 100644
index 0000000..ddf50f2
--- /dev/null
+++ b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/site/site.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+<project>
+    <body>
+        <menu name="Services">
+            <item name="Elasticsearch" href="../../../streams-project/services/elasticsearch.html"/>
+            <item name="Mongo" href="../../../streams-project/services/mongo.html"/>
+        </menu>
+    </body>
+</project>
diff --git a/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/test/java/org/apache/streams/example/test/MongoElasticsearchSyncIT.java b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/test/java/org/apache/streams/example/test/MongoElasticsearchSyncIT.java
new file mode 100644
index 0000000..84d0fba
--- /dev/null
+++ b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/test/java/org/apache/streams/example/test/MongoElasticsearchSyncIT.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.example.test;
+
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.elasticsearch.ElasticsearchClientManager;
+import org.apache.streams.example.MongoElasticsearchSync;
+import org.apache.streams.example.MongoElasticsearchSyncConfiguration;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
+import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
+import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
+import org.elasticsearch.action.search.SearchRequestBuilder;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.client.Requests;
+import org.elasticsearch.cluster.health.ClusterHealthStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertFalse;
+import static org.testng.Assert.assertNotEquals;
+import static org.testng.Assert.assertTrue;
+
+/**
+ * MongoElasticsearchSyncIT is an integration test for MongoElasticsearchSync.
+ *
+ * <p>Requires running mongodb and elasticsearch services; the connection
+ * details are loaded from target/test-classes/MongoElasticsearchSyncIT.conf.
+ */
+public class MongoElasticsearchSyncIT {
+
+  private final static Logger LOGGER = LoggerFactory.getLogger(MongoElasticsearchSyncIT.class);
+
+  // NOTE(review): MAPPER appears unused within this test class — confirm before removing.
+  ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+
+  // Configuration under test, resolved from the IT conf file with reference fallback.
+  protected MongoElasticsearchSyncConfiguration testConfiguration;
+  // Elasticsearch client pointed at the sync's destination cluster.
+  protected Client testClient;
+
+  /**
+   * Loads the test configuration and verifies preconditions: the
+   * elasticsearch cluster is reachable (not RED) and the destination
+   * index does not yet exist.
+   *
+   * @throws Exception if configuration cannot be loaded or cluster calls fail
+   */
+  @BeforeClass
+  public void prepareTest() throws Exception {
+
+    Config reference  = ConfigFactory.load();
+    File conf_file = new File("target/test-classes/MongoElasticsearchSyncIT.conf");
+    assert(conf_file.exists());
+    // setAllowMissing(false) makes a missing include (e.g. the docker service
+    // properties) fail fast rather than resolve to empty config.
+    Config testResourceConfig  = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+    testConfiguration = new ComponentConfigurator<>(MongoElasticsearchSyncConfiguration.class).detectConfiguration(typesafe);
+    testClient = ElasticsearchClientManager.getInstance(testConfiguration.getDestination()).client();
+
+    ClusterHealthRequest clusterHealthRequest = Requests.clusterHealthRequest();
+    ClusterHealthResponse clusterHealthResponse = testClient.admin().cluster().health(clusterHealthRequest).actionGet();
+    assertNotEquals(clusterHealthResponse.getStatus(), ClusterHealthStatus.RED);
+
+    // The destination index must not pre-exist so the test proves the sync created it.
+    IndicesExistsRequest indicesExistsRequest = Requests.indicesExistsRequest(testConfiguration.getDestination().getIndex());
+    IndicesExistsResponse indicesExistsResponse = testClient.admin().indices().exists(indicesExistsRequest).actionGet();
+    assertFalse(indicesExistsResponse.isExists());
+  }
+
+  /**
+   * Runs the sync, then asserts the destination index exists and contains
+   * the expected number of documents.
+   *
+   * @throws Exception on stream or cluster failures
+   */
+  @Test
+  public void testSync() throws Exception {
+
+    MongoElasticsearchSync sync = new MongoElasticsearchSync(testConfiguration);
+
+    // run() blocks until the local stream completes.
+    sync.run();
+
+    IndicesExistsRequest indicesExistsRequest = Requests.indicesExistsRequest(testConfiguration.getDestination().getIndex());
+    IndicesExistsResponse indicesExistsResponse = testClient.admin().indices().exists(indicesExistsRequest).actionGet();
+    assertTrue(indicesExistsResponse.isExists());
+
+    // assert lines in file
+    SearchRequestBuilder countRequest = testClient
+        .prepareSearch(testConfiguration.getDestination().getIndex())
+        .setTypes(testConfiguration.getDestination().getType());
+    SearchResponse countResponse = countRequest.execute().actionGet();
+
+    // 89 is presumably the document count seeded into the source mongo
+    // collection by an earlier fixture — TODO confirm against the seeding step.
+    assertEquals(89, (int)countResponse.getHits().getTotalHits());
+
+  }
+}
diff --git a/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/test/resources/MongoElasticsearchSyncIT.conf b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/test/resources/MongoElasticsearchSyncIT.conf
new file mode 100644
index 0000000..ca2ce25
--- /dev/null
+++ b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/test/resources/MongoElasticsearchSyncIT.conf
@@ -0,0 +1,35 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+include "../../../../../elasticsearch.properties"
+include "../../../../../mongo.properties"
+"source": {
+  host = ${mongo.tcp.host}
+  port = ${mongo.tcp.port}
+  "db": "mongo_persist_it",
+  "collection": "activity"
+},
+"destination": {
+  hosts = [
+      ${es.tcp.host}
+  ]
+  port = ${es.tcp.port}
+  "clusterName": "elasticsearch",
+  "index": "mongo_elasticsearch_sync_it",
+  "type": "activity",
+  "forceUseConfig": true
+}
+taskTimeoutMs = 60000
+
diff --git a/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/test/resources/testng.xml b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/test/resources/testng.xml
new file mode 100644
index 0000000..e8f8233
--- /dev/null
+++ b/streams-examples/streams-examples-local/mongo-elasticsearch-sync/src/test/resources/testng.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE suite SYSTEM "http://testng.org/testng-1.0.dtd" >
+
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<suite name="ExampleITs">
+
+    <test name="MongoElasticsearchSyncIT">
+        <classes>
+            <class name="org.apache.streams.example.test.MongoElasticsearchSyncIT" />
+        </classes>
+    </test>
+
+</suite>
diff --git a/streams-examples/streams-examples-local/pom.xml b/streams-examples/streams-examples-local/pom.xml
new file mode 100644
index 0000000..11bd321
--- /dev/null
+++ b/streams-examples/streams-examples-local/pom.xml
@@ -0,0 +1,49 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <artifactId>streams-examples</artifactId>
+        <groupId>org.apache.streams.examples</groupId>
+        <version>0.5.2-SNAPSHOT</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>streams-examples-local</artifactId>
+
+    <packaging>pom</packaging>
+    <name>streams-examples-local</name>
+
+    <description>Contributed examples of use cases for Streams using streams-runtime-local</description>
+
+    <properties>
+
+    </properties>
+
+    <modules>
+        <module>elasticsearch-hdfs</module>
+        <module>elasticsearch-reindex</module>
+        <module>mongo-elasticsearch-sync</module>
+        <!--<module>twitter-follow-neo4j</module>-->
+        <module>twitter-history-elasticsearch</module>
+        <module>twitter-userstream-elasticsearch</module>
+    </modules>
+
+</project>
diff --git a/streams-examples/streams-examples-local/twitter-follow-neo4j/README.md b/streams-examples/streams-examples-local/twitter-follow-neo4j/README.md
new file mode 100644
index 0000000..3e63a53
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-follow-neo4j/README.md
@@ -0,0 +1,8 @@
+Apache Streams
+Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
+--------------------------------------------------------------------------------
+
+org.apache.streams.examples:streams-twitter-follow-neo4j
+=========================================================
+
+[README.md](src/site/markdown/index.md "README")
diff --git a/streams-examples/streams-examples-local/twitter-follow-neo4j/pom.xml b/streams-examples/streams-examples-local/twitter-follow-neo4j/pom.xml
new file mode 100644
index 0000000..2e47cf6
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-follow-neo4j/pom.xml
@@ -0,0 +1,256 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <groupId>org.apache.streams.examples</groupId>
+        <artifactId>streams-examples-local</artifactId>
+        <version>0.5.2-SNAPSHOT</version>
+        <relativePath>..</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>streams-twitter-follow-neo4j</artifactId>
+    <name>twitter-follow-neo4j</name>
+
+    <description>
+        Collects friend or follower connections for a set of twitter users to build a graph database in neo4j.
+    </description>
+
+    <properties>
+        <docker.repo>apachestreams</docker.repo>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.testng</groupId>
+            <artifactId>testng</artifactId>
+            <version>${testng.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-all</artifactId>
+            <version>1.3</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.typesafe</groupId>
+            <artifactId>config</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-core</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-config</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-runtime-local</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-provider-twitter</artifactId>
+            <version>${project.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>commons-logging</groupId>
+                    <artifactId>commons-logging</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-persist-neo4j</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-pojo</artifactId>
+            <type>test-jar</type>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>log4j-over-slf4j</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>jcl-over-slf4j</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>jul-to-slf4j</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-classic</artifactId>
+            <version>${logback.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-core</artifactId>
+            <version>${logback.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-testing</artifactId>
+            <version>${project.version}</version>
+            <scope>test</scope>
+            <type>test-jar</type>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <sourceDirectory>src/main/java</sourceDirectory>
+        <testSourceDirectory>src/test/java</testSourceDirectory>
+        <resources>
+            <resource>
+                <directory>src/main/resources</directory>
+            </resource>
+        </resources>
+        <testResources>
+            <testResource>
+                <directory>src/test/resources</directory>
+            </testResource>
+        </testResources>
+        <plugins>
+            <!-- This binary runs with logback -->
+            <!-- Keep log4j out -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-enforcer-plugin</artifactId>
+                <version>1.3.1</version>
+                <executions>
+                    <execution>
+                        <id>enforce-banned-dependencies</id>
+                        <goals>
+                            <goal>enforce</goal>
+                        </goals>
+                        <configuration>
+                            <rules>
+                                <bannedDependencies>
+                                    <excludes>
+                                        <exclude>org.slf4j:slf4j-log4j12</exclude>
+                                        <exclude>org.slf4j:slf4j-jcl</exclude>
+                                        <exclude>org.slf4j:slf4j-jdk14</exclude>
+                                        <exclude>org.log4j:log4j</exclude>
+                                        <exclude>commons-logging:commons-logging</exclude>
+                                    </excludes>
+                                </bannedDependencies>
+                            </rules>
+                            <fail>true</fail>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.streams.plugins</groupId>
+                <artifactId>streams-plugin-pojo</artifactId>
+                <configuration>
+                    <sourcePaths>
+                        <sourcePath>${project.basedir}/src/main/jsonschema</sourcePath>
+                    </sourcePaths>
+                    <targetDirectory>${project.basedir}/target/generated-sources/pojo</targetDirectory>
+                    <targetPackage>org.apache.streams.example.twitter</targetPackage>
+                </configuration>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>generate-sources</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.apache.streams</groupId>
+                        <artifactId>streams-persist-neo4j</artifactId>
+                        <version>${project.version}</version>
+                    </dependency>
+                    <dependency>
+                        <groupId>org.apache.streams</groupId>
+                        <artifactId>streams-provider-twitter</artifactId>
+                        <version>${project.version}</version>
+                    </dependency>
+                </dependencies>
+            </plugin>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>build-helper-maven-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>add-source</id>
+                        <phase>generate-sources</phase>
+                        <goals>
+                            <goal>add-source</goal>
+                        </goals>
+                        <configuration>
+                            <sources>
+                                <source>target/generated-sources/pojo</source>
+                            </sources>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-dependency-plugin</artifactId>
+                <version>2.4</version>
+                <executions>
+                    <execution>
+                        <id>resource-dependencies</id>
+                        <phase>process-test-resources</phase>
+                        <goals>
+                            <goal>unpack-dependencies</goal>
+                        </goals>
+                        <configuration>
+                            <includeArtifactIds>streams-schemas-activitystreams</includeArtifactIds>
+                            <includes>**/*.json</includes>
+                            <outputDirectory>${project.build.directory}/test-classes</outputDirectory>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-failsafe-plugin</artifactId>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.apache.maven.surefire</groupId>
+                        <artifactId>surefire-testng</artifactId>
+                        <version>${failsafe.plugin.version}</version>
+                    </dependency>
+                </dependencies>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>
diff --git a/streams-examples/streams-examples-local/twitter-follow-neo4j/src/main/java/org/apache/streams/example/TwitterFollowNeo4j.java b/streams-examples/streams-examples-local/twitter-follow-neo4j/src/main/java/org/apache/streams/example/TwitterFollowNeo4j.java
new file mode 100644
index 0000000..7a3ff46
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-follow-neo4j/src/main/java/org/apache/streams/example/TwitterFollowNeo4j.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.example;
+
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.converter.ActivityConverterProcessor;
+import org.apache.streams.converter.ActivityConverterProcessorConfiguration;
+import org.apache.streams.converter.TypeConverterProcessor;
+import org.apache.streams.core.StreamBuilder;
+import org.apache.streams.data.ActivityConverter;
+import org.apache.streams.data.DocumentClassifier;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.local.LocalRuntimeConfiguration;
+import org.apache.streams.local.builders.LocalStreamBuilder;
+import org.apache.streams.neo4j.Neo4jConfiguration;
+import org.apache.streams.neo4j.bolt.Neo4jBoltPersistWriter;
+import org.apache.streams.twitter.TwitterFollowingConfiguration;
+import org.apache.streams.twitter.converter.TwitterDocumentClassifier;
+import org.apache.streams.twitter.converter.TwitterFollowActivityConverter;
+import org.apache.streams.twitter.provider.TwitterFollowingProvider;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+/**
+ * Collects friend and follow connections for a set of twitter users and builds a graph
+ * database in neo4j.
+ */
+public class TwitterFollowNeo4j implements Runnable {
+
+  private final static Logger LOGGER = LoggerFactory.getLogger(TwitterFollowNeo4j.class);
+
+  private TwitterFollowNeo4jConfiguration config;
+
+  public TwitterFollowNeo4j() {
+    this(new ComponentConfigurator<>(TwitterFollowNeo4jConfiguration.class).detectConfiguration(StreamsConfigurator.getConfig()));
+  }
+
+  public TwitterFollowNeo4j(TwitterFollowNeo4jConfiguration config) {
+    this.config = config;
+  }
+
+  public void run() {
+
+    TwitterFollowingConfiguration twitterFollowingConfiguration = config.getTwitter();
+    TwitterFollowingProvider followingProvider = new TwitterFollowingProvider(twitterFollowingConfiguration);
+    TypeConverterProcessor converter = new TypeConverterProcessor(String.class);
+
+    List<DocumentClassifier> classifiers = Stream.of((DocumentClassifier) new TwitterDocumentClassifier()).collect(Collectors.toList());
+    List<ActivityConverter> converters = Stream.of((ActivityConverter) new TwitterFollowActivityConverter()).collect(Collectors.toList());
+    ActivityConverterProcessorConfiguration activityConverterProcessorConfiguration =
+        new ActivityConverterProcessorConfiguration()
+            .withClassifiers(classifiers)
+            .withConverters(converters);
+    ActivityConverterProcessor activity = new ActivityConverterProcessor(activityConverterProcessorConfiguration);
+
+    Neo4jConfiguration neo4jConfiguration = config.getNeo4j();
+    Neo4jBoltPersistWriter graphPersistWriter = new Neo4jBoltPersistWriter(neo4jConfiguration);
+    graphPersistWriter.prepare(neo4jConfiguration);
+
+    LocalRuntimeConfiguration localRuntimeConfiguration =
+        StreamsJacksonMapper.getInstance().convertValue(StreamsConfigurator.detectConfiguration(), LocalRuntimeConfiguration.class);
+    StreamBuilder builder = new LocalStreamBuilder(localRuntimeConfiguration);
+
+    builder.newPerpetualStream(TwitterFollowingProvider.class.getCanonicalName(), followingProvider);
+    builder.addStreamsProcessor(TypeConverterProcessor.class.getCanonicalName(), converter, 1, TwitterFollowingProvider.class.getCanonicalName());
+    builder.addStreamsProcessor(ActivityConverterProcessor.class.getCanonicalName(), activity, 1, TypeConverterProcessor.class.getCanonicalName());
+    builder.addStreamsPersistWriter(Neo4jBoltPersistWriter.class.getCanonicalName(), graphPersistWriter, 1, ActivityConverterProcessor.class.getCanonicalName());
+
+    builder.start();
+  }
+
+  public static void main(String[] args) {
+
+    LOGGER.info(StreamsConfigurator.getConfig().toString());
+
+    TwitterFollowNeo4j stream = new TwitterFollowNeo4j();
+
+    stream.run();
+
+  }
+
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/twitter-follow-neo4j/src/main/jsonschema/TwitterFollowNeo4jConfiguration.json b/streams-examples/streams-examples-local/twitter-follow-neo4j/src/main/jsonschema/TwitterFollowNeo4jConfiguration.json
new file mode 100644
index 0000000..e2b3386
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-follow-neo4j/src/main/jsonschema/TwitterFollowNeo4jConfiguration.json
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/draft-03/schema",
+  "$license": [
+    "http://www.apache.org/licenses/LICENSE-2.0"
+  ],
+  "type": "object",
+  "javaType" : "org.apache.streams.example.TwitterFollowNeo4jConfiguration",
+  "javaInterfaces": ["java.io.Serializable"],
+  "properties": {
+    "twitter": { "javaType": "org.apache.streams.twitter.TwitterFollowingConfiguration", "type": "object", "required": true },
+    "neo4j": { "javaType": "org.apache.streams.neo4j.Neo4jConfiguration", "type": "object", "required": true }
+  }
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/twitter-follow-neo4j/src/main/resources/TwitterFollowNeo4j.dot b/streams-examples/streams-examples-local/twitter-follow-neo4j/src/main/resources/TwitterFollowNeo4j.dot
new file mode 100644
index 0000000..7b519ff
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-follow-neo4j/src/main/resources/TwitterFollowNeo4j.dot
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+ digraph g {
+
+  //providers
+  TwitterFollowingProvider [label="TwitterFollowingProvider",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterFollowingProvider.java"];
+
+  //processors
+  TypeConverterProcessor [label="TypeConverterProcessor",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-components/streams-converters/src/main/java/org/apache/streams/converters/TypeConverterProcessor.java"];
+  ActivityConverterProcessor [label="ActivityConverterProcessor",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-components/streams-converters/src/main/java/org/apache/streams/converters/ActivityConverterProcessor.java"];
+
+  //persisters
+  Neo4jBoltPersistWriter [label="Neo4jBoltPersistWriter",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-contrib/streams-persist-neo4j/src/main/java/org/apache/streams/neo4j/bolt/Neo4jBoltPersistWriter.java"];
+
+  //data
+  destination [label="bolt://{host}:{port}",shape=box];
+
+  //stream
+  TwitterFollowingProvider -> TypeConverterProcessor [label="Follow",URL="https://github.com/apache/streams/blob/master/streams-contrib/streams-provider-twitter/src/main/jsonschema/com/twitter/Follow.java"];
+  TypeConverterProcessor -> ActivityConverterProcessor [label="String"];
+  ActivityConverterProcessor -> Neo4jBoltPersistWriter [label="Activity",URL="https://github.com/apache/streams/blob/master/streams-pojo/src/main/jsonschema/org/apache/streams/pojo/json/activity.json"];
+  Neo4jBoltPersistWriter -> destination
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/twitter-follow-neo4j/src/site/markdown/TwitterFollowNeo4j.md b/streams-examples/streams-examples-local/twitter-follow-neo4j/src/site/markdown/TwitterFollowNeo4j.md
new file mode 100644
index 0000000..c241b60
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-follow-neo4j/src/site/markdown/TwitterFollowNeo4j.md
@@ -0,0 +1,45 @@
+### TwitterFollowNeo4j
+
+#### Description:
+
+Collects friend or follower connections for a set of twitter users to build a graph database in neo4j.
+
+#### Configuration:
+
+[TwitterFollowNeo4j.json](TwitterFollowNeo4j.json "TwitterFollowNeo4j.json") for the full set of configuration parameters.
+
+##### application.conf
+
+    include "neo4j.properties"
+    include "neo4j.conf"
+    include "twitter.oauth.conf"
+    twitter {
+      endpoint = "friends"
+      info = [
+        18055613
+      ]
+      max_items = 1000
+    }
+
+#### Run (SBT):
+
+    sbtx -210 -sbt-create
+    set resolvers += "Local Maven Repository" at "file://"+Path.userHome.absolutePath+"/.m2/repository"
+    set libraryDependencies += "org.apache.streams.examples" % "streams-twitter-follow-neo4j" % "0.5.2-SNAPSHOT"
+    set fork := true
+    set javaOptions +="-Dconfig.file=application.conf"
+    run org.apache.streams.example.TwitterFollowNeo4j
+
+#### Run (Docker):
+
+    docker run apachestreams/twitter-follow-neo4j java -cp twitter-follow-neo4j-jar-with-dependencies.jar org.apache.streams.example.TwitterFollowNeo4j
+
+#### Specification:
+
+[TwitterFollowNeo4j.dot](TwitterFollowNeo4j.dot "TwitterFollowNeo4j.dot" )
+
+#### Diagram:
+
+![TwitterFollowNeo4j.dot.svg](./TwitterFollowNeo4j.dot.svg)
+
+###### Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
diff --git a/streams-examples/streams-examples-local/twitter-follow-neo4j/src/site/markdown/index.md b/streams-examples/streams-examples-local/twitter-follow-neo4j/src/site/markdown/index.md
new file mode 100644
index 0000000..aad8305
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-follow-neo4j/src/site/markdown/index.md
@@ -0,0 +1,31 @@
+### twitter-follow-neo4j
+
+#### Requirements:
+ - Authorized Twitter API credentials
+ - A running Neo4J 3.0.0+ instance
+
+#### Streams:
+
+<a href="TwitterFollowNeo4j.html" target="_self">TwitterFollowNeo4j</a>
+
+#### Build:
+
+    mvn clean package    
+
+#### Test:
+
+Start up neo4j with docker:
+
+    mvn -PdockerITs docker:start
+    
+Build with integration testing enabled, using your credentials
+
+    mvn clean test verify -DskipITs=false -DargLine="-Dconfig.file=twitter.oauth.conf"
+
+Shutdown neo4j when finished:
+
+    mvn -PdockerITs docker:stop
+
+[JavaDocs](apidocs/index.html "JavaDocs")
+
+###### Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
diff --git a/streams-examples/streams-examples-local/twitter-follow-neo4j/src/site/site.xml b/streams-examples/streams-examples-local/twitter-follow-neo4j/src/site/site.xml
new file mode 100644
index 0000000..e237768
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-follow-neo4j/src/site/site.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+<project>
+    <body>
+        <menu name="Credentials">
+            <item name="Twitter" href="../../../streams-project/credentials/twitter.html"/>
+        </menu>
+        <menu name="Services">
+            <item name="Neo4j" href="../../../streams-project/services/neo4j.html"/>
+        </menu>
+    </body>
+</project>
diff --git a/streams-examples/streams-examples-local/twitter-follow-neo4j/src/test/java/org/apache/streams/example/test/TwitterFollowNeo4jIT.java b/streams-examples/streams-examples-local/twitter-follow-neo4j/src/test/java/org/apache/streams/example/test/TwitterFollowNeo4jIT.java
new file mode 100644
index 0000000..75e56bb
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-follow-neo4j/src/test/java/org/apache/streams/example/test/TwitterFollowNeo4jIT.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.example.test;
+
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.core.StreamsResultSet;
+import org.apache.streams.example.TwitterFollowNeo4j;
+import org.apache.streams.example.TwitterFollowNeo4jConfiguration;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.neo4j.Neo4jReaderConfiguration;
+import org.apache.streams.neo4j.bolt.Neo4jBoltClient;
+import org.apache.streams.neo4j.bolt.Neo4jBoltPersistReader;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
+
+import org.neo4j.driver.v1.Session;
+import org.neo4j.driver.v1.Transaction;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.IOException;
+
+import static org.testng.Assert.assertTrue;
+
+/**
+ * TwitterFollowNeo4jIT is an integration test for TwitterFollowNeo4j.
+ */
+public class TwitterFollowNeo4jIT {
+
+  private final static Logger LOGGER = LoggerFactory.getLogger(TwitterFollowNeo4jIT.class);
+
+  private static final ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+
+  protected TwitterFollowNeo4jConfiguration testConfiguration;
+
+  private int count = 0;
+
+  private Neo4jBoltClient testClient;
+
+  @BeforeClass
+  public void prepareTest() throws IOException {
+
+    Config reference  = ConfigFactory.load();
+    File conf = new File("target/test-classes/TwitterFollowNeo4jIT.conf");
+    assertTrue(conf.exists());
+    Config testResourceConfig  = ConfigFactory.parseFileAnySyntax(conf, ConfigParseOptions.defaults().setAllowMissing(false));
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+    testConfiguration = new ComponentConfigurator<>(TwitterFollowNeo4jConfiguration.class).detectConfiguration(typesafe);
+    testClient = Neo4jBoltClient.getInstance(testConfiguration.getNeo4j());
+
+    Session session = testClient.client().session();
+    Transaction transaction = session.beginTransaction();
+    transaction.run("MATCH ()-[r]-() DELETE r");
+    transaction.run("MATCH (n) DETACH DELETE n");
+    transaction.success();
+    session.close();
+  }
+
+  @Test
+  public void testTwitterFollowGraph() throws Exception {
+
+    TwitterFollowNeo4j stream = new TwitterFollowNeo4j(testConfiguration);
+
+    stream.run();
+
+    Neo4jReaderConfiguration vertexReaderConfiguration= MAPPER.convertValue(testConfiguration.getNeo4j(), Neo4jReaderConfiguration.class);
+    vertexReaderConfiguration.setQuery("MATCH (v) return v");
+    Neo4jBoltPersistReader vertexReader = new Neo4jBoltPersistReader(vertexReaderConfiguration);
+    vertexReader.prepare(null);
+    StreamsResultSet vertexResultSet = vertexReader.readAll();
+    LOGGER.info("Total Read: {}", vertexResultSet.size() );
+    assertTrue(vertexResultSet.size() > 100);
+
+    Neo4jReaderConfiguration edgeReaderConfiguration= MAPPER.convertValue(testConfiguration.getNeo4j(), Neo4jReaderConfiguration.class);
+    edgeReaderConfiguration.setQuery("MATCH (s)-[r]->(d) return r");
+    Neo4jBoltPersistReader edgeReader = new Neo4jBoltPersistReader(edgeReaderConfiguration);
+    edgeReader.prepare(null);
+    StreamsResultSet edgeResultSet = edgeReader.readAll();
+    LOGGER.info("Total Read: {}", edgeResultSet.size() );
+    assertTrue(edgeResultSet.size() == vertexResultSet.size()-1);
+
+  }
+
+
+  @AfterClass
+  public void cleanup() throws Exception {
+    Session session = testClient.client().session();
+    Transaction transaction = session.beginTransaction();
+    transaction.run("MATCH ()-[r]-() DELETE r");
+    transaction.run("MATCH (n) DETACH DELETE n");
+    transaction.success();
+    session.close();
+  }
+
+
+}
diff --git a/streams-examples/streams-examples-local/twitter-follow-neo4j/src/test/resources/TwitterFollowNeo4jIT.conf b/streams-examples/streams-examples-local/twitter-follow-neo4j/src/test/resources/TwitterFollowNeo4jIT.conf
new file mode 100644
index 0000000..d54a50c
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-follow-neo4j/src/test/resources/TwitterFollowNeo4jIT.conf
@@ -0,0 +1,32 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+include "../../../../../twitter.conf"
+include "../../../../../neo4j.properties"
+twitter {
+  debug = true
+  ids_only = false
+  endpoint = "friends"
+  info = [
+    18055613
+  ]
+  twitter.max_items = 200
+}
+neo4j {
+  scheme = "tcp"
+  hosts += ${neo4j.tcp.host}
+  port = ${neo4j.tcp.port}
+}
+taskTimeoutMs = 60000
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/twitter-history-elasticsearch/README.md b/streams-examples/streams-examples-local/twitter-history-elasticsearch/README.md
new file mode 100644
index 0000000..5b0b3ec
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-history-elasticsearch/README.md
@@ -0,0 +1,8 @@
+Apache Streams (incubating)
+Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
+--------------------------------------------------------------------------------
+
+org.apache.streams:twitter-history-elasticsearch
+================================================
+
+[README.md](src/site/markdown/index.md "README")
diff --git a/streams-examples/streams-examples-local/twitter-history-elasticsearch/pom.xml b/streams-examples/streams-examples-local/twitter-history-elasticsearch/pom.xml
new file mode 100644
index 0000000..d3ed523
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-history-elasticsearch/pom.xml
@@ -0,0 +1,284 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <groupId>org.apache.streams.examples</groupId>
+        <artifactId>streams-examples-local</artifactId>
+        <version>0.5.2-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>streams-twitter-history-elasticsearch</artifactId>
+    <name>twitter-history-elasticsearch</name>
+
+    <description>
+        Retrieves as many posts from a known list of users as twitter API allows.
+
+        Converts them to activities, and writes them in activity format to Elasticsearch.
+    </description>
+
+    <properties>
+        <docker.repo>apachestreams</docker.repo>
+        <elasticsearch.version>2.4.6</elasticsearch.version>
+        <lucene.version>5.5.4</lucene.version>
+    </properties>
+
+    <dependencies>
+        <!-- Test includes -->
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-test-framework</artifactId>
+            <version>${lucene.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-codecs</artifactId>
+            <version>${lucene.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.elasticsearch</groupId>
+            <artifactId>elasticsearch</artifactId>
+            <version>${elasticsearch.version}</version>
+            <type>test-jar</type>
+        </dependency>
+        <dependency>
+            <groupId>org.testng</groupId>
+            <artifactId>testng</artifactId>
+            <version>${testng.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-all</artifactId>
+            <version>1.3</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-core</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.typesafe</groupId>
+            <artifactId>config</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-config</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-util</artifactId>
+            <type>test-jar</type>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-pojo</artifactId>
+            <type>test-jar</type>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-runtime-local</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-provider-twitter</artifactId>
+            <exclusions>
+                <exclusion>
+                    <groupId>commons-logging</groupId>
+                    <artifactId>commons-logging</artifactId>
+                </exclusion>
+            </exclusions>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-persist-elasticsearch</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>log4j-over-slf4j</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>jcl-over-slf4j</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>jul-to-slf4j</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-classic</artifactId>
+            <version>${logback.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-core</artifactId>
+            <version>${logback.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-testing</artifactId>
+            <version>${project.version}</version>
+            <scope>test</scope>
+            <type>test-jar</type>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <sourceDirectory>src/main/java</sourceDirectory>
+        <testSourceDirectory>src/test/java</testSourceDirectory>
+        <resources>
+            <resource>
+                <directory>src/main/resources</directory>
+            </resource>
+        </resources>
+        <testResources>
+            <testResource>
+                <directory>src/test/resources</directory>
+            </testResource>
+        </testResources>
+        <plugins>
+            <!-- This binary runs with logback -->
+            <!-- Keep log4j out -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-enforcer-plugin</artifactId>
+                <version>1.3.1</version>
+                <executions>
+                    <execution>
+                        <id>enforce-banned-dependencies</id>
+                        <goals>
+                            <goal>enforce</goal>
+                        </goals>
+                        <configuration>
+                            <rules>
+                                <bannedDependencies>
+                                    <excludes>
+                                        <exclude>org.slf4j:slf4j-log4j12</exclude>
+                                        <exclude>org.slf4j:slf4j-jcl</exclude>
+                                        <exclude>org.slf4j:slf4j-jdk14</exclude>
+                                        <exclude>org.log4j:log4j</exclude>
+                                        <exclude>commons-logging:commons-logging</exclude>
+                                    </excludes>
+                                </bannedDependencies>
+                            </rules>
+                            <fail>true</fail>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.streams.plugins</groupId>
+                <artifactId>streams-plugin-pojo</artifactId>
+                <configuration>
+                    <sourcePaths>
+                        <sourcePath>${project.basedir}/src/main/jsonschema</sourcePath>
+                    </sourcePaths>
+                    <targetDirectory>${project.basedir}/target/generated-sources/pojo</targetDirectory>
+                    <targetPackage>org.apache.streams.example.twitter</targetPackage>
+                </configuration>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>generate-sources</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.apache.streams</groupId>
+                        <artifactId>streams-persist-elasticsearch</artifactId>
+                        <version>${project.version}</version>
+                    </dependency>
+                    <dependency>
+                        <groupId>org.apache.streams</groupId>
+                        <artifactId>streams-provider-twitter</artifactId>
+                        <version>${project.version}</version>
+                    </dependency>
+                </dependencies>
+            </plugin>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>build-helper-maven-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>add-source</id>
+                        <phase>generate-sources</phase>
+                        <goals>
+                            <goal>add-source</goal>
+                        </goals>
+                        <configuration>
+                            <sources>
+                                <source>target/generated-sources/pojo</source>
+                            </sources>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-dependency-plugin</artifactId>
+                <configuration>
+                    <includes>**/*.json</includes>
+                    <outputDirectory>${project.build.directory}/test-classes</outputDirectory>
+                    <includeGroupIds>org.apache.streams</includeGroupIds>
+                    <includeTypes>test-jar</includeTypes>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>test-resource-dependencies</id>
+                        <phase>process-test-resources</phase>
+                        <goals>
+                            <goal>unpack-dependencies</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-failsafe-plugin</artifactId>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.apache.maven.surefire</groupId>
+                        <artifactId>surefire-testng</artifactId>
+                        <version>${failsafe.plugin.version}</version>
+                    </dependency>
+                </dependencies>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>
diff --git a/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/main/java/org/apache/streams/example/TwitterHistoryElasticsearch.java b/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/main/java/org/apache/streams/example/TwitterHistoryElasticsearch.java
new file mode 100644
index 0000000..d4fd976
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/main/java/org/apache/streams/example/TwitterHistoryElasticsearch.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.example;
+
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.converter.ActivityConverterProcessor;
+import org.apache.streams.core.StreamBuilder;
+import org.apache.streams.elasticsearch.ElasticsearchPersistWriter;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.local.LocalRuntimeConfiguration;
+import org.apache.streams.local.builders.LocalStreamBuilder;
+import org.apache.streams.twitter.provider.TwitterTimelineProvider;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Retrieves as many posts from a known list of users as twitter API allows.
+ *
+ * Converts them to activities, and writes them in activity format to Elasticsearch.
+ */
+public class TwitterHistoryElasticsearch implements Runnable {
+
+  public final static String STREAMS_ID = "TwitterHistoryElasticsearch";
+
+  private final static Logger LOGGER = LoggerFactory.getLogger(TwitterHistoryElasticsearch.class);
+
+  // Resolved stream configuration: twitter provider + elasticsearch writer settings.
+  TwitterHistoryElasticsearchConfiguration config;
+
+  /**
+   * Builds the stream from the ambient typesafe configuration
+   * (classpath / -Dconfig.file).
+   */
+  public TwitterHistoryElasticsearch() {
+    this(new ComponentConfigurator<>(TwitterHistoryElasticsearchConfiguration.class).detectConfiguration(StreamsConfigurator.getConfig()));
+  }
+
+  /**
+   * Builds the stream from an explicit configuration (used by integration tests).
+   *
+   * @param config resolved stream configuration
+   */
+  public TwitterHistoryElasticsearch(TwitterHistoryElasticsearchConfiguration config) {
+    this.config = config;
+  }
+
+  /**
+   * Entry point: logs the resolved configuration and launches the stream
+   * on a new thread.
+   *
+   * @param args unused; all configuration comes from typesafe config
+   */
+  public static void main(String[] args)
+  {
+    LOGGER.info(StreamsConfigurator.getConfig().toString());
+
+    TwitterHistoryElasticsearch history = new TwitterHistoryElasticsearch();
+
+    new Thread(history).start();
+
+  }
+
+  /**
+   * Wires provider -> converter -> writer into a {@link LocalStreamBuilder}
+   * and starts the stream.
+   */
+  @Override
+  public void run() {
+
+    TwitterTimelineProvider provider = new TwitterTimelineProvider(config.getTwitter());
+    ActivityConverterProcessor converter = new ActivityConverterProcessor();
+    ElasticsearchPersistWriter writer = new ElasticsearchPersistWriter(config.getElasticsearch());
+
+    LocalRuntimeConfiguration localRuntimeConfiguration =
+        StreamsJacksonMapper.getInstance().convertValue(StreamsConfigurator.detectConfiguration(), LocalRuntimeConfiguration.class);
+    StreamBuilder builder = new LocalStreamBuilder(localRuntimeConfiguration);
+
+    // NOTE(review): a perpetual stream is used even though a timeline history
+    // collection is bounded — confirm this matches the provider's lifecycle.
+    builder.newPerpetualStream(TwitterTimelineProvider.class.getCanonicalName(), provider);
+    builder.addStreamsProcessor(ActivityConverterProcessor.class.getCanonicalName(), converter, 2, TwitterTimelineProvider.class.getCanonicalName());
+    builder.addStreamsPersistWriter(ElasticsearchPersistWriter.class.getCanonicalName(), writer, 1, ActivityConverterProcessor.class.getCanonicalName());
+    builder.start();
+  }
+}
diff --git a/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/main/jsonschema/org/apache/streams/example/twitter/TwitterHistoryElasticsearchConfiguration.json b/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/main/jsonschema/org/apache/streams/example/twitter/TwitterHistoryElasticsearchConfiguration.json
new file mode 100644
index 0000000..1886325
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/main/jsonschema/org/apache/streams/example/twitter/TwitterHistoryElasticsearchConfiguration.json
@@ -0,0 +1,13 @@
+{
+    "$schema": "http://json-schema.org/draft-03/schema",
+    "$license": [
+        "http://www.apache.org/licenses/LICENSE-2.0"
+    ],
+    "type": "object",
+    "javaType" : "org.apache.streams.example.TwitterHistoryElasticsearchConfiguration",
+    "javaInterfaces": ["java.io.Serializable"],
+    "properties": {
+        "twitter": { "javaType": "org.apache.streams.twitter.TwitterTimelineProviderConfiguration", "type": "object", "required": true },
+        "elasticsearch": { "javaType": "org.apache.streams.elasticsearch.ElasticsearchWriterConfiguration", "type": "object", "required": true }
+    }
+}
diff --git a/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/main/resources/TwitterHistoryElasticsearch.dot b/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/main/resources/TwitterHistoryElasticsearch.dot
new file mode 100644
index 0000000..76e685e
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/main/resources/TwitterHistoryElasticsearch.dot
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+ digraph g {
+
+  //providers
+  TwitterTimelineProvider [label="TwitterTimelineProvider",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterTimelineProvider.java"];
+
+  //processors
+  ActivityConverterProcessor [label="ActivityConverterProcessor",shape=box,URL="https://github.com/apache/streams/blob/master/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityConverterProcessor.java"];
+  
+  //persisters
+  ElasticsearchPersistWriter [label="ElasticsearchPersistWriter",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistWriter.java"];
+  
+  //data
+  es [label="es://{index}/{type}",shape=box];
+  
+  //stream
+  TwitterTimelineProvider -> ActivityConverterProcessor [label="ObjectNode"];
+  ActivityConverterProcessor -> ElasticsearchPersistWriter [label="Activity",URL="https://github.com/apache/streams/blob/master/streams-pojo/src/main/jsonschema/org/apache/streams/pojo/json/activity.json"];
+  
+  ElasticsearchPersistWriter -> es [label="Activity",URL="https://github.com/apache/streams/blob/master/streams-pojo/src/main/jsonschema/org/apache/streams/pojo/json/activity.json"];
+  
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/site/markdown/TwitterHistoryElasticsearch.md b/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/site/markdown/TwitterHistoryElasticsearch.md
new file mode 100644
index 0000000..09d7f5a
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/site/markdown/TwitterHistoryElasticsearch.md
@@ -0,0 +1,53 @@
+### TwitterHistoryElasticsearch
+
+#### Description:
+
+Retrieves as many posts from a known list of users as twitter API allows.
+
+Converts them to activities, and writes them in activity format to Elasticsearch.
+
+#### Configuration:
+
+[TwitterHistoryElasticsearch.json](TwitterHistoryElasticsearch.json "TwitterHistoryElasticsearch.json") defines the configuration schema for this stream.
+
+##### application.conf
+
+    include "elasticsearch.properties"
+    include "elasticsearch.conf"
+    include "twitter.oauth.conf"
+    twitter {
+      info = [
+        18055613
+      ]
+      twitter.max_items = 1000
+    }
+    elasticsearch {
+      index = twitter_history
+      type = activity
+      forceUseConfig = true
+    }
+
+[TwitterHistoryElasticsearchIT.conf](TwitterHistoryElasticsearchIT.conf "TwitterHistoryElasticsearchIT.conf")
+
+#### Run (SBT):
+
+    sbtx -210 -sbt-create
+    set resolvers += "Local Maven Repository" at "file://"+Path.userHome.absolutePath+"/.m2/repository"
+    set libraryDependencies += "org.apache.streams" % "twitter-history-elasticsearch" % "0.4-incubating-SNAPSHOT"
+    set fork := true
+    set javaOptions +="-Dconfig.file=application.conf"
+    run org.apache.streams.example.TwitterHistoryElasticsearch
+
+#### Run (Docker):
+
+    docker run apachestreams/twitter-history-elasticsearch java -cp twitter-history-elasticsearch-jar-with-dependencies.jar -Dconfig.file=`pwd`/application.conf org.apache.streams.example.TwitterHistoryElasticsearch
+
+#### Specification:
+
+[TwitterHistoryElasticsearch.dot](TwitterHistoryElasticsearch.dot "TwitterHistoryElasticsearch.dot" )
+
+#### Diagram:
+
+![TwitterHistoryElasticsearch.dot.svg](./TwitterHistoryElasticsearch.dot.svg)
+
+###### Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
diff --git a/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/site/markdown/index.md b/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/site/markdown/index.md
new file mode 100644
index 0000000..a56819a
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/site/markdown/index.md
@@ -0,0 +1,42 @@
+### twitter-history-elasticsearch
+
+#### Requirements:
+ - Authorized Twitter API credentials
+ - A running ElasticSearch 2.0.0+ instance
+
+#### Streams:
+
+<a href="TwitterHistoryElasticsearch.html" target="_self">TwitterHistoryElasticsearch</a>
+
+#### Build:
+
+    mvn clean package
+   
+#### Test:
+
+Create a local file `application.conf` with valid twitter credentials
+
+    twitter {
+      oauth {
+        consumerKey = ""
+        consumerSecret = ""
+        accessToken = ""
+        accessTokenSecret = ""
+      }
+    }
+    
+Start up elasticsearch with docker:
+    
+    mvn -PdockerITs docker:start
+
+Build with integration testing enabled, using your credentials
+
+    mvn clean test verify -DskipITs=false -DargLine="-Dconfig.file=twitter.oauth.conf"
+
+Shutdown elasticsearch when finished:
+
+    mvn -PdockerITs docker:stop
+
+[JavaDocs](apidocs/index.html "JavaDocs")
+
+###### Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
diff --git a/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/site/site.xml b/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/site/site.xml
new file mode 100644
index 0000000..9033276
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/site/site.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+<project>
+    <body>
+        <menu name="Credentials">
+            <item name="Twitter" href="../../../streams-project/credentials/twitter.html"/>
+        </menu>
+        <menu name="Services">
+            <item name="Elasticsearch" href="../../../streams-project/services/elasticsearch.html"/>
+        </menu>
+    </body>
+</project>
diff --git a/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/test/java/org/apache/streams/example/test/TwitterHistoryElasticsearchIT.java b/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/test/java/org/apache/streams/example/test/TwitterHistoryElasticsearchIT.java
new file mode 100644
index 0000000..0eb022b
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/test/java/org/apache/streams/example/test/TwitterHistoryElasticsearchIT.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.example.test;
+
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.elasticsearch.ElasticsearchClientManager;
+import org.apache.streams.example.TwitterHistoryElasticsearch;
+import org.apache.streams.example.TwitterHistoryElasticsearchConfiguration;
+
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
+import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
+import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
+import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
+import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
+import org.elasticsearch.action.search.SearchRequestBuilder;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.client.Requests;
+import org.elasticsearch.cluster.health.ClusterHealthStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+import static org.testng.Assert.assertNotEquals;
+import static org.testng.AssertJUnit.assertTrue;
+
+/**
+ * Integration test for the TwitterHistoryElasticsearch example: runs the stream
+ * against live Twitter and Elasticsearch instances and verifies documents were indexed.
+ */
+public class TwitterHistoryElasticsearchIT {
+
+  private final static Logger LOGGER = LoggerFactory.getLogger(TwitterHistoryElasticsearchIT.class);
+
+  protected TwitterHistoryElasticsearchConfiguration testConfiguration;
+  protected Client testClient;
+
+  // Number of documents found in the target index after the stream has run.
+  private int count = 0;
+
+  /**
+   * Loads TwitterHistoryElasticsearchIT.conf, resolves it against the reference
+   * config, connects to the cluster, and drops the target index if it already
+   * exists so the post-run count reflects only this test.
+   *
+   * @throws Exception if configuration cannot be resolved or the cluster is unreachable
+   */
+  @BeforeClass
+  public void prepareTest() throws Exception {
+
+    Config reference  = ConfigFactory.load();
+    File conf_file = new File("target/test-classes/TwitterHistoryElasticsearchIT.conf");
+    // Use a TestNG assertion rather than the 'assert' keyword, which is a
+    // silent no-op unless the JVM runs with -ea.
+    assertTrue(conf_file.exists());
+    Config testResourceConfig  = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+    testConfiguration = new ComponentConfigurator<>(TwitterHistoryElasticsearchConfiguration.class).detectConfiguration(typesafe);
+    testClient = ElasticsearchClientManager.getInstance(testConfiguration.getElasticsearch()).client();
+
+    // Refuse to run against an unhealthy cluster.
+    ClusterHealthRequest clusterHealthRequest = Requests.clusterHealthRequest();
+    ClusterHealthResponse clusterHealthResponse = testClient.admin().cluster().health(clusterHealthRequest).actionGet();
+    assertNotEquals(clusterHealthResponse.getStatus(), ClusterHealthStatus.RED);
+
+    // Start from a clean index.
+    IndicesExistsRequest indicesExistsRequest = Requests.indicesExistsRequest(testConfiguration.getElasticsearch().getIndex());
+    IndicesExistsResponse indicesExistsResponse = testClient.admin().indices().exists(indicesExistsRequest).actionGet();
+    if (indicesExistsResponse.isExists()) {
+      DeleteIndexRequest deleteIndexRequest = Requests.deleteIndexRequest(testConfiguration.getElasticsearch().getIndex());
+      DeleteIndexResponse deleteIndexResponse = testClient.admin().indices().delete(deleteIndexRequest).actionGet();
+      assertTrue(deleteIndexResponse.isAcknowledged());
+    }
+  }
+
+  /**
+   * Runs the stream to completion, then asserts that at least one activity
+   * document landed in the configured index/type.
+   *
+   * @throws Exception if the stream or the verification search fails
+   */
+  @Test
+  public void testTwitterHistoryElasticsearch() throws Exception {
+
+    TwitterHistoryElasticsearch stream = new TwitterHistoryElasticsearch(testConfiguration);
+
+    stream.run();
+
+    // Count the documents the stream wrote to the index.
+    SearchRequestBuilder countRequest = testClient
+        .prepareSearch(testConfiguration.getElasticsearch().getIndex())
+        .setTypes(testConfiguration.getElasticsearch().getType());
+    SearchResponse countResponse = countRequest.execute().actionGet();
+
+    count = (int)countResponse.getHits().getTotalHits();
+
+    assertNotEquals(count, 0);
+  }
+
+}
diff --git a/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/test/resources/TwitterHistoryElasticsearchIT.conf b/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/test/resources/TwitterHistoryElasticsearchIT.conf
new file mode 100644
index 0000000..833e038
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-history-elasticsearch/src/test/resources/TwitterHistoryElasticsearchIT.conf
@@ -0,0 +1,31 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+include "../../../../../twitter.conf"
+include "../../../../../elasticsearch.properties"
+twitter {
+  info = [
+    18055613
+  ]
+  # inside the twitter { } block the key is just max_items;
+  # a twitter. prefix here would resolve to twitter.twitter.max_items
+  max_items = 1000
+}
+elasticsearch {
+  hosts += ${es.tcp.host}
+  port = ${es.tcp.port}
+  clusterName = elasticsearch
+  index = twitter_history_elasticsearch_it
+  type = activity
+  forceUseConfig = true
+}
diff --git a/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/README.md b/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/README.md
new file mode 100644
index 0000000..a09b345
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/README.md
@@ -0,0 +1,8 @@
+Apache Streams (incubating)
+Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
+--------------------------------------------------------------------------------
+
+org.apache.streams:twitter-userstream-elasticsearch
+===================================================
+
+[README.md](src/site/markdown/index.md "README")
diff --git a/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/pom.xml b/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/pom.xml
new file mode 100644
index 0000000..b501354
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/pom.xml
@@ -0,0 +1,237 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <groupId>org.apache.streams.examples</groupId>
+        <artifactId>streams-examples-local</artifactId>
+        <version>0.5.2-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>twitter-userstream-elasticsearch</artifactId>
+    <name>twitter-userstream-elasticsearch</name>
+
+    <description>
+        Connects to an active twitter account and stores the userstream as activities in Elasticsearch
+    </description>
+
+    <properties>
+        <docker.repo>apachestreams</docker.repo>
+        <elasticsearch.version>2.4.6</elasticsearch.version>
+        <lucene.version>5.5.4</lucene.version>
+    </properties>
+
+    <dependencies>
+        <!-- Test includes -->
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-test-framework</artifactId>
+            <version>${lucene.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-codecs</artifactId>
+            <version>${lucene.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.elasticsearch</groupId>
+            <artifactId>elasticsearch</artifactId>
+            <version>${elasticsearch.version}</version>
+            <type>test-jar</type>
+        </dependency>
+        <dependency>
+            <groupId>org.testng</groupId>
+            <artifactId>testng</artifactId>
+            <version>${testng.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-all</artifactId>
+            <version>1.3</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-core</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.typesafe</groupId>
+            <artifactId>config</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-config</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-runtime-local</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-filters</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-provider-twitter</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-persist-elasticsearch</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-pojo</artifactId>
+            <type>test-jar</type>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-classic</artifactId>
+            <version>${logback.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-core</artifactId>
+            <version>${logback.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-testing</artifactId>
+            <version>${project.version}</version>
+            <scope>test</scope>
+            <type>test-jar</type>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <sourceDirectory>src/main/java</sourceDirectory>
+        <testSourceDirectory>src/test/java</testSourceDirectory>
+        <resources>
+            <resource>
+                <directory>src/main/resources</directory>
+            </resource>
+        </resources>
+        <testResources>
+            <testResource>
+                <directory>src/test/resources</directory>
+            </testResource>
+        </testResources>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.streams.plugins</groupId>
+                <artifactId>streams-plugin-pojo</artifactId>
+                <configuration>
+                    <sourcePaths>
+                        <sourcePath>${project.basedir}/src/main/jsonschema</sourcePath>
+                    </sourcePaths>
+                    <targetDirectory>${project.basedir}/target/generated-sources/pojo</targetDirectory>
+                    <targetPackage>org.apache.streams.example.twitter</targetPackage>
+                </configuration>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>generate-sources</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.apache.streams</groupId>
+                        <artifactId>streams-persist-elasticsearch</artifactId>
+                        <version>${project.version}</version>
+                    </dependency>
+                    <dependency>
+                        <groupId>org.apache.streams</groupId>
+                        <artifactId>streams-provider-twitter</artifactId>
+                        <version>${project.version}</version>
+                    </dependency>
+                </dependencies>
+            </plugin>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>build-helper-maven-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>add-source</id>
+                        <phase>generate-sources</phase>
+                        <goals>
+                            <goal>add-source</goal>
+                        </goals>
+                        <configuration>
+                            <sources>
+                                <source>target/generated-sources/pojo</source>
+                            </sources>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-dependency-plugin</artifactId>
+                <configuration>
+                    <includes>**/*.json</includes>
+                    <outputDirectory>${project.build.directory}/test-classes</outputDirectory>
+                    <includeGroupIds>org.apache.streams</includeGroupIds>
+                    <includeArtifactIds>streams-schemas-activitystreams</includeArtifactIds>
+                    <includeTypes>test-jar</includeTypes>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>test-resource-dependencies</id>
+                        <phase>process-test-resources</phase>
+                        <goals>
+                            <goal>unpack-dependencies</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-failsafe-plugin</artifactId>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.apache.maven.surefire</groupId>
+                        <artifactId>surefire-testng</artifactId>
+                        <version>${failsafe.plugin.version}</version>
+                    </dependency>
+                </dependencies>
+            </plugin>
+            <plugin>
+                <groupId>io.fabric8</groupId>
+                <artifactId>docker-maven-plugin</artifactId>
+                <version>${docker.plugin.version}</version>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>
diff --git a/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/main/java/org/apache/streams/example/TwitterUserstreamElasticsearch.java b/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/main/java/org/apache/streams/example/TwitterUserstreamElasticsearch.java
new file mode 100644
index 0000000..fdfb39b
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/main/java/org/apache/streams/example/TwitterUserstreamElasticsearch.java
@@ -0,0 +1,147 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.example;
+
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.converter.ActivityConverterProcessor;
+import org.apache.streams.core.StreamBuilder;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsProcessor;
+import org.apache.streams.elasticsearch.ElasticsearchPersistDeleter;
+import org.apache.streams.elasticsearch.ElasticsearchPersistWriter;
+import org.apache.streams.elasticsearch.ElasticsearchWriterConfiguration;
+import org.apache.streams.filters.VerbDefinitionDropFilter;
+import org.apache.streams.filters.VerbDefinitionKeepFilter;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.local.LocalRuntimeConfiguration;
+import org.apache.streams.local.builders.LocalStreamBuilder;
+import org.apache.streams.pojo.json.Activity;
+import org.apache.streams.twitter.TwitterStreamConfiguration;
+import org.apache.streams.twitter.provider.TwitterStreamProvider;
+import org.apache.streams.verbs.ObjectCombination;
+import org.apache.streams.verbs.VerbDefinition;
+
+import org.apache.commons.lang3.StringUtils;
+import org.hamcrest.MatcherAssert;
+import org.hamcrest.core.IsInstanceOf;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+/**
+ * Example stream that populates elasticsearch with activities from twitter userstream in real-time.
+ *
+ * <p>Incoming documents are converted to Activities, then fan out to two branches:
+ * non-delete activities are indexed via {@link ElasticsearchPersistWriter}, while
+ * activities with verb "delete" have their id rewritten and are routed to
+ * {@link ElasticsearchPersistDeleter} to remove the original post.
+ */
+public class TwitterUserstreamElasticsearch implements Runnable {
+
+  // identifier for this example stream
+  public final static String STREAMS_ID = "TwitterUserstreamElasticsearch";
+
+  private final static Logger LOGGER = LoggerFactory.getLogger(TwitterUserstreamElasticsearch.class);
+
+  /* this pattern will match any/only deletes */
+  private static VerbDefinition deleteVerbDefinition =
+      new VerbDefinition()
+          .withValue("delete")
+          .withObjects(Stream.of(new ObjectCombination()).collect(Collectors.toList()));
+
+  private TwitterUserstreamElasticsearchConfiguration config;
+
+  /**
+   * Builds the stream using configuration detected by {@link StreamsConfigurator}.
+   */
+  public TwitterUserstreamElasticsearch() {
+    this(new ComponentConfigurator<>(TwitterUserstreamElasticsearchConfiguration.class).detectConfiguration(StreamsConfigurator.getConfig()));
+
+  }
+
+  /**
+   * Builds the stream from an explicit configuration.
+   *
+   * @param config combined twitter + elasticsearch configuration for this stream
+   */
+  public TwitterUserstreamElasticsearch(TwitterUserstreamElasticsearchConfiguration config) {
+    this.config = config;
+  }
+
+  /**
+   * Entry point: logs the resolved configuration and runs the stream on a new thread.
+   *
+   * @param args unused; configuration is resolved by {@link StreamsConfigurator}
+   */
+  public static void main(String[] args)
+  {
+    LOGGER.info(StreamsConfigurator.getConfig().toString());
+
+    TwitterUserstreamElasticsearch userstream = new TwitterUserstreamElasticsearch();
+    new Thread(userstream).start();
+
+  }
+
+  @Override
+  public void run() {
+
+    TwitterStreamConfiguration twitterStreamConfiguration = config.getTwitter();
+    ElasticsearchWriterConfiguration elasticsearchWriterConfiguration = config.getElasticsearch();
+
+    // pipeline components
+    TwitterStreamProvider stream = new TwitterStreamProvider(twitterStreamConfiguration);
+    ActivityConverterProcessor converter = new ActivityConverterProcessor();
+    // drops "delete" activities: everything else goes to the writer branch
+    VerbDefinitionDropFilter noDeletesProcessor = new VerbDefinitionDropFilter(Stream.of(deleteVerbDefinition).collect(Collectors.toSet()));
+    ElasticsearchPersistWriter writer = new ElasticsearchPersistWriter(elasticsearchWriterConfiguration);
+    // keeps only "delete" activities: these go to the deleter branch
+    VerbDefinitionKeepFilter deleteOnlyProcessor = new VerbDefinitionKeepFilter(Stream.of(deleteVerbDefinition).collect(Collectors.toSet()));
+    SetDeleteIdProcessor setDeleteIdProcessor = new SetDeleteIdProcessor();
+    ElasticsearchPersistDeleter deleter = new ElasticsearchPersistDeleter(elasticsearchWriterConfiguration);
+
+    LocalRuntimeConfiguration localRuntimeConfiguration =
+        StreamsJacksonMapper.getInstance().convertValue(StreamsConfigurator.detectConfiguration(), LocalRuntimeConfiguration.class);
+    StreamBuilder builder = new LocalStreamBuilder(localRuntimeConfiguration);
+
+    // the converter output fans out to both filters (write branch and delete branch)
+    builder.newPerpetualStream(TwitterStreamProvider.class.getCanonicalName(), stream);
+    builder.addStreamsProcessor(ActivityConverterProcessor.class.getCanonicalName(), converter, 2, TwitterStreamProvider.class.getCanonicalName());
+    builder.addStreamsProcessor(VerbDefinitionDropFilter.class.getCanonicalName(), noDeletesProcessor, 1, ActivityConverterProcessor.class.getCanonicalName());
+    builder.addStreamsPersistWriter(ElasticsearchPersistWriter.class.getCanonicalName(), writer, 1, VerbDefinitionDropFilter.class.getCanonicalName());
+    builder.addStreamsProcessor(VerbDefinitionKeepFilter.class.getCanonicalName(), deleteOnlyProcessor, 1, ActivityConverterProcessor.class.getCanonicalName());
+    builder.addStreamsProcessor(SetDeleteIdProcessor.class.getCanonicalName(), setDeleteIdProcessor, 1, VerbDefinitionKeepFilter.class.getCanonicalName());
+    builder.addStreamsPersistWriter(ElasticsearchPersistDeleter.class.getCanonicalName(), deleter, 1, SetDeleteIdProcessor.class.getCanonicalName());
+
+    builder.start();
+
+  }
+
+  /**
+   * Rewrites the id of a delete Activity ("delete" -> "post") so that
+   * {@link ElasticsearchPersistDeleter} removes the originally-indexed post.
+   *
+   * <p>NOTE(review): non-static inner class — it uses no enclosing-instance state
+   * and could be declared static to avoid the hidden outer reference.
+   */
+  protected class SetDeleteIdProcessor implements StreamsProcessor {
+
+    public String getId() {
+      return "TwitterUserstreamElasticsearch.SetDeleteIdProcessor";
+    }
+
+    @Override
+    public List<StreamsDatum> process(StreamsDatum entry) {
+
+      // NOTE(review): hamcrest assertion in a production path — a non-Activity
+      // document arriving here throws AssertionError; confirm this is intended.
+      MatcherAssert.assertThat(entry.getDocument(), IsInstanceOf.instanceOf(Activity.class));
+      String id = entry.getId();
+      // replace delete with post in id
+      // ensure ElasticsearchPersistDeleter will remove original post if present
+      id = StringUtils.replace(id, "delete", "post");
+      entry.setId(id);
+
+      return Stream.of(entry).collect(Collectors.toList());
+    }
+
+    @Override
+    public void prepare(Object configurationObject) {
+
+      // no-op: this processor holds no resources
+
+    }
+
+    @Override
+    public void cleanUp() {
+      // no-op: nothing to release
+
+    }
+  }
+
+}
diff --git a/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/main/jsonschema/org/apache/streams/example/twitter/TwitterUserstreamElasticsearchConfiguration.json b/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/main/jsonschema/org/apache/streams/example/twitter/TwitterUserstreamElasticsearchConfiguration.json
new file mode 100644
index 0000000..7261439
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/main/jsonschema/org/apache/streams/example/twitter/TwitterUserstreamElasticsearchConfiguration.json
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/draft-03/schema",
+  "$license": [
+    "http://www.apache.org/licenses/LICENSE-2.0"
+  ],
+  "type": "object",
+  "javaType" : "org.apache.streams.example.TwitterUserstreamElasticsearchConfiguration",
+  "javaInterfaces": ["java.io.Serializable"],
+  "properties": {
+    "twitter": { "javaType": "org.apache.streams.twitter.TwitterStreamConfiguration", "type": "object", "required": true },
+    "elasticsearch": { "javaType": "org.apache.streams.elasticsearch.ElasticsearchWriterConfiguration", "type": "object", "required": true }
+  }
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/main/resources/TwitterUserstreamElasticsearch.dot b/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/main/resources/TwitterUserstreamElasticsearch.dot
new file mode 100644
index 0000000..79e1707
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/main/resources/TwitterUserstreamElasticsearch.dot
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+ digraph g {
+
+  //providers
+  TwitterStreamProvider [label="TwitterStreamProvider",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterStreamProvider.java"];
+
+  //processors
+  ActivityConverterProcessor [label="ActivityConverterProcessor",shape=box,URL="https://github.com/apache/streams/blob/master/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityConverterProcessor.java"];
+  DeleteOnlyProcessor [label="VerbDefinitionKeepFilter (verb:delete)",shape=box,URL="https://github.com/apache/streams/blob/master/streams-components/streams-filters/src/main/java/org/apache/streams/filters/VerbDefinitionKeepFilter.java"];
+  NoDeletesProcessor  [label="VerbDefinitionDropFilter (verb:delete)",shape=box,URL="https://github.com/apache/streams/blob/master/streams-components/streams-filters/src/main/java/org/apache/streams/filters/VerbDefinitionDropFilter.java"];
+  SetDeleteIdProcessor [label="SetDeleteIdProcessor (verb:post)",shape=box,URL="https://github.com/apache/streams-examples/blob/master/local/twitter-userstream-elasticsearch/src/main/java/org/apache/streams/twitter/example/TwitterUserstreamElasticsearch.java"];
+
+  //persisters
+  ElasticsearchPersistWriter [label="ElasticsearchPersistWriter",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistWriter.java"];
+  ElasticsearchPersistDeleter [label="ElasticsearchPersistDeleter",shape=ellipse,URL="https://github.com/apache/streams/blob/master/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistDeleter.java"];
+
+   //data
+  es [label="es://{index}/{type}",shape=box];
+
+  //stream
+  TwitterStreamProvider -> ActivityConverterProcessor [label="ObjectNode"];
+  ActivityConverterProcessor -> DeleteOnlyProcessor [label="Activity",URL="https://github.com/apache/streams/blob/master/streams-pojo/src/main/jsonschema/org/apache/streams/pojo/json/activity.json"];
+  ActivityConverterProcessor -> NoDeletesProcessor [label="Activity",URL="https://github.com/apache/streams/blob/master/streams-pojo/src/main/jsonschema/org/apache/streams/pojo/json/activity.json"];
+  DeleteOnlyProcessor -> SetDeleteIdProcessor [label="Activity",URL="https://github.com/apache/streams/blob/master/streams-pojo/src/main/jsonschema/org/apache/streams/pojo/json/activity.json"];
+  NoDeletesProcessor -> ElasticsearchPersistWriter [label="Activity",URL="https://github.com/apache/streams/blob/master/streams-pojo/src/main/jsonschema/org/apache/streams/pojo/json/activity.json"];
+  ElasticsearchPersistWriter -> es [label="Activity",URL="https://github.com/apache/streams/blob/master/streams-pojo/src/main/jsonschema/org/apache/streams/pojo/json/activity.json"];
+  SetDeleteIdProcessor -> ElasticsearchPersistDeleter [label="Delete",URL="https://github.com/apache/streams/blob/master/streams-pojo/src/main/jsonschema/org/apache/streams/pojo/json/verbs/delete.json"];
+  ElasticsearchPersistDeleter -> es [label="Delete",URL="https://github.com/apache/streams/blob/master/streams-pojo/src/main/jsonschema/org/apache/streams/pojo/json/verbs/delete.json"];
+}
\ No newline at end of file
diff --git a/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/site/markdown/TwitterUserstreamElasticsearch.md b/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/site/markdown/TwitterUserstreamElasticsearch.md
new file mode 100644
index 0000000..c812749
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/site/markdown/TwitterUserstreamElasticsearch.md
@@ -0,0 +1,43 @@
+### TwitterUserstreamElasticsearch
+
+#### Description:
+
+This example connects to an active twitter account and stores the userstream as activities in Elasticsearch
+
+#### Configuration:
+
+[TwitterUserstreamElasticsearch.json](TwitterUserstreamElasticsearch.json "TwitterUserstreamElasticsearch.json") contains the complete specification of this stream's configuration.
+
+##### application.conf
+
+    include "elasticsearch.properties"
+    include "elasticsearch.conf"
+    include "twitter.oauth.conf"
+    elasticsearch {
+      index = twitter_userstream
+      type = activity
+      forceUseConfig = true
+    }
+
+#### Run (SBT):
+
+    sbtx -210 -sbt-create
+    set resolvers += "Local Maven Repository" at "file://"+Path.userHome.absolutePath+"/.m2/repository"
+    set libraryDependencies += "org.apache.streams" % "twitter-userstream-elasticsearch" % "0.5.2-SNAPSHOT"
+    set fork := true
+    set javaOptions +="-Dconfig.file=application.conf"
+    run org.apache.streams.example.TwitterUserstreamElasticsearch
+
+#### Run (Docker):
+
+    docker run apachestreams/twitter-userstream-elasticsearch java -cp twitter-userstream-elasticsearch-jar-with-dependencies.jar -Dconfig.file=`pwd`/application.conf org.apache.streams.example.TwitterUserstreamElasticsearch
+
+#### Specification:
+
+[TwitterUserstreamElasticsearch.dot](TwitterUserstreamElasticsearch.dot "TwitterUserstreamElasticsearch.dot" )
+
+#### Diagram:
+
+![TwitterUserstreamElasticsearch.dot.svg](./TwitterUserstreamElasticsearch.dot.svg)
+
+###### Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
diff --git a/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/site/markdown/index.md b/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/site/markdown/index.md
new file mode 100644
index 0000000..6e0b931
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/site/markdown/index.md
@@ -0,0 +1,31 @@
+### twitter-userstream-elasticsearch
+
+#### Requirements:
+ - Authorized Twitter API credentials
+ - A running ElasticSearch 1.0.0+ instance
+
+#### Streams:
+
+<a href="TwitterUserstreamElasticsearch.html" target="_self">TwitterUserstreamElasticsearch</a>
+
+#### Build:
+
+    mvn clean package
+
+#### Test:
+
+Start up elasticsearch with docker:
+    
+    mvn -PdockerITs docker:start
+
+Build with integration testing enabled, using your credentials
+
+    mvn clean test verify -DskipITs=false -DargLine="-Dconfig.file=twitter.oauth.conf"
+
+Shutdown elasticsearch when finished:
+
+    mvn -PdockerITs docker:stop
+
+[JavaDocs](apidocs/index.html "JavaDocs")
+
+###### Licensed under Apache License 2.0 - http://www.apache.org/licenses/LICENSE-2.0
diff --git a/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/site/site.xml b/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/site/site.xml
new file mode 100644
index 0000000..9033276
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/site/site.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+<project>
+    <body>
+        <menu name="Credentials">
+            <item name="Twitter" href="../../../streams-project/credentials/twitter.html"/>
+        </menu>
+        <menu name="Services">
+            <item name="Elasticsearch" href="../../../streams-project/services/elasticsearch.html"/>
+        </menu>
+    </body>
+</project>
diff --git a/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/test/java/org/apache/streams/example/test/TwitterUserstreamElasticsearchIT.java b/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/test/java/org/apache/streams/example/test/TwitterUserstreamElasticsearchIT.java
new file mode 100644
index 0000000..63dd8de
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/test/java/org/apache/streams/example/test/TwitterUserstreamElasticsearchIT.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.example.test;
+
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.elasticsearch.ElasticsearchClientManager;
+import org.apache.streams.example.TwitterUserstreamElasticsearch;
+import org.apache.streams.example.TwitterUserstreamElasticsearchConfiguration;
+
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
+import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
+import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
+import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
+import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
+import org.elasticsearch.action.search.SearchRequestBuilder;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.client.Requests;
+import org.elasticsearch.cluster.health.ClusterHealthStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+import static org.testng.Assert.assertNotEquals;
+import static org.testng.AssertJUnit.assertTrue;
+
+/**
+ * Integration test: runs the TwitterUserstreamElasticsearch stream and verifies documents land in Elasticsearch.
+ */
+public class TwitterUserstreamElasticsearchIT {
+
+  private final static Logger LOGGER = LoggerFactory.getLogger(TwitterUserstreamElasticsearchIT.class);
+
+  protected TwitterUserstreamElasticsearchConfiguration testConfiguration; // parsed from TwitterUserstreamElasticsearchIT.conf
+  protected Client testClient; // transport client to the test Elasticsearch cluster
+
+  private int count = 0; // number of documents found in the target index after the run
+
+  @BeforeClass
+  public void prepareTest() throws Exception { // load config, connect, check cluster health, drop any stale test index
+
+    Config reference  = ConfigFactory.load();
+    File conf_file = new File("target/test-classes/TwitterUserstreamElasticsearchIT.conf");
+    assert(conf_file.exists()); // bare Java assert — only enforced when the JVM runs with -ea
+    Config testResourceConfig  = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve(); // test conf wins over reference.conf defaults
+    testConfiguration = new ComponentConfigurator<>(TwitterUserstreamElasticsearchConfiguration.class).detectConfiguration(typesafe);
+    testClient = ElasticsearchClientManager.getInstance(testConfiguration.getElasticsearch()).client();
+
+    ClusterHealthRequest clusterHealthRequest = Requests.clusterHealthRequest();
+    ClusterHealthResponse clusterHealthResponse = testClient.admin().cluster().health(clusterHealthRequest).actionGet();
+    assertNotEquals(clusterHealthResponse.getStatus(), ClusterHealthStatus.RED); // yellow is acceptable for a single-node test cluster
+
+    IndicesExistsRequest indicesExistsRequest = Requests.indicesExistsRequest(testConfiguration.getElasticsearch().getIndex());
+    IndicesExistsResponse indicesExistsResponse = testClient.admin().indices().exists(indicesExistsRequest).actionGet();
+    if(indicesExistsResponse.isExists()) { // start from a clean index so the final count reflects only this run
+      DeleteIndexRequest deleteIndexRequest = Requests.deleteIndexRequest(testConfiguration.getElasticsearch().getIndex());
+      DeleteIndexResponse deleteIndexResponse = testClient.admin().indices().delete(deleteIndexRequest).actionGet();
+      assertTrue(deleteIndexResponse.isAcknowledged());
+    }; // NOTE(review): stray ';' — empty statement, harmless
+
+  }
+
+  @Test
+  public void testUserstreamElasticsearch() throws Exception {
+
+    TwitterUserstreamElasticsearch stream = new TwitterUserstreamElasticsearch(testConfiguration);
+
+    Thread thread = new Thread(stream);
+    thread.start();
+    thread.join(30000); // let the stream collect for up to 30s, then check results
+
+    // count documents written to the test index during the run
+    SearchRequestBuilder countRequest = testClient
+        .prepareSearch(testConfiguration.getElasticsearch().getIndex())
+        .setTypes(testConfiguration.getElasticsearch().getType());
+    SearchResponse countResponse = countRequest.execute().actionGet();
+
+    count = (int)countResponse.getHits().getTotalHits(); // narrowing cast is fine for test-sized result sets
+
+    assertNotEquals(count, 0); // at least one activity must have been indexed
+  }
+}
diff --git a/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/test/resources/TwitterUserstreamElasticsearchIT.conf b/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/test/resources/TwitterUserstreamElasticsearchIT.conf
new file mode 100644
index 0000000..74ae3ee
--- /dev/null
+++ b/streams-examples/streams-examples-local/twitter-userstream-elasticsearch/src/test/resources/TwitterUserstreamElasticsearchIT.conf
@@ -0,0 +1,32 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+include "../../../../../twitter.conf"
+include "../../../../../elasticsearch.properties"
+twitter {
+  endpoint = sample
+  track = [
+    "data"
+  ]
+}
+elasticsearch {
+  hosts += ${es.tcp.host}
+  port = ${es.tcp.port}
+  clusterName = elasticsearch
+  index = twitter_userstream_elasticsearch_it
+  type = activity
+  forceUseConfig = true
+}
+taskTimeoutMs = 60000
\ No newline at end of file

-- 
To stop receiving notification emails like this one, please contact
"commits@streams.apache.org" <co...@streams.apache.org>.